diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
new file mode 100644
index 0000000..ff50eba
--- /dev/null
+++ b/.github/workflows/docker-image.yml
@@ -0,0 +1,43 @@
+name: build_docker
+
+on:
+  push:
+    branches:
+      - main
+      - dev
+  release:
+    types: [created] # trigger when a new Release is created
+
+jobs:
+  build_docker:
+    name: Build docker
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - run: |
+          echo "The version for this build is: ${GITHUB_REF_NAME} (but this variable cannot be read in the current context)"
+          echo "The version for this build is: ${{ github.ref_name }}"
+          env
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v4
+        with:
+          context: .
+          push: true
+          labels: ${{ steps.meta.outputs.labels }}
+          platforms: linux/amd64,linux/arm64
+          tags: |
+            ${{ secrets.DOCKERHUB_USERNAME }}/cocopilot-chatgpt:${{ github.ref_name }}
+            ${{ secrets.DOCKERHUB_USERNAME }}/cocopilot-chatgpt:latest
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..3d6de38
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,11 @@
+FROM python:3.7-alpine
+RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories
+RUN apk add --no-cache musl-dev openssl-dev libffi-dev tzdata gcc ttf-dejavu
+RUN cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
+COPY . /app/
+WORKDIR /app
+RUN pip3 install --upgrade pip
+RUN pip3 install --no-cache-dir --upgrade -r requirements.txt
+
+EXPOSE 8080
+CMD ["python3", "main.py"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..893e813
--- /dev/null
+++ b/README.md
@@ -0,0 +1,67 @@
+# cocopilot
+
+![GitHub repo size](https://img.shields.io/github/repo-size/caoyunzhou/cocopilot-gpt)
+![Docker Image Size (tag)](https://img.shields.io/docker/image-size/caoyunzhou/cocopilot-chatgpt/latest)
+![Docker Pulls](https://img.shields.io/docker/pulls/caoyunzhou/cocopilot-chatgpt)
+[![GitHub Repo stars](https://img.shields.io/github/stars/caoyunzhou/cocopilot-gpt?style=social)](https://github.com/caoyunzhou/cocopilot-gpt/stargazers)
+
+- This project provides a quick and easy way to use cocopilot with GPT-4.
+- `Self-deployment is strongly recommended to reduce the risk of account flagging.`
+- [copilot](https://github.com/settings/copilot) is a free AI application that lets you chat with GPT models. This project makes it reachable through an HTTP API that mimics the official OpenAI API for ChatGPT, so it works with any program that uses the OpenAI API for ChatGPT (see the sketch below).
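+
+A minimal sketch of that compatibility, assuming the service is already running on local port 8080 (see the deployment sections below) and using the `openai` Python package (v1.x); `gho_xxx` is a placeholder token:
+
+```python
+from openai import OpenAI
+
+# Point the official OpenAI client at the proxy instead of api.openai.com
+client = OpenAI(base_url="http://127.0.0.1:8080/v1", api_key="gho_xxx")
+
+resp = client.chat.completions.create(
+    model="gpt-4",
+    messages=[{"role": "user", "content": "hi"}],
+)
+print(resp.choices[0].message.content)
+```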
+
+## Prerequisites: get a Copilot token (GHO-xxx, GHU-xxx)
+
+- You need to sign in to GitHub and have Copilot enabled before you can use this.
+- Get a `GHO-xxx,GHU-xxx` token conveniently from the address below:
+  - [fakeopen by pengzhile](https://cocopilot.org/copilot/token)
+
+## Promotion
+
+- Share a Copilot seat on an enterprise GitHub account
+- Cheaper than the official **$10/month**: contact QQ 496618601
+
+### Docker deployment
+
+- Quick start with docker run:
+
+```shell
+  docker run -d \
+    --name cocopilot-chatgpt \
+    -p 8080:8080 \
+    caoyunzhou/cocopilot-chatgpt
+```
+
+### Railway deployment
+
+- [Sign up for Railway](https://railway.app?referralCode=CG56Re)
+- [![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/UhhP8o?referralCode=CG56Re)
+
+### Usage
+
+- Access by IP
+
+```shell
+curl --location 'http://127.0.0.1:8080/v1/chat/completions' \
+--header 'Content-Type: application/json' \
+--header 'Authorization: Bearer gho_xxx' \
+--data '{
+    "model": "gpt-4",
+    "messages": [{"role": "user", "content": "hi"}]
+}'
+```
+
+- Access by domain
+
+```shell
+curl --location 'https://cocopilot.aivvm.com/v1/chat/completions' \
+--header 'Content-Type: application/json' \
+--header 'Authorization: Bearer gho_xxx' \
+--data '{
+    "model": "gpt-4",
+    "messages": [{"role": "user", "content": "hi"}]
+}'
+```
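+
+- Python access (a minimal sketch using the `requests` package from `requirements.txt`; it assumes the service is reachable at `http://127.0.0.1:8080`, `gho_xxx` is a placeholder token, and with `"stream": true` the proxy relays the upstream chunks as they arrive):
+
+```python
+import requests
+
+resp = requests.post(
+    "http://127.0.0.1:8080/v1/chat/completions",
+    headers={"Authorization": "Bearer gho_xxx"},
+    json={
+        "model": "gpt-4",
+        "messages": [{"role": "user", "content": "hi"}],
+        "stream": True,
+    },
+    stream=True,
+)
+
+# Print the relayed chunks as they arrive
+for chunk in resp.iter_content(chunk_size=1024):
+    if chunk:
+        print(chunk.decode("utf-8", errors="ignore"), end="", flush=True)
+```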
+
+### Star History
+
+[![Star History Chart](https://api.star-history.com/svg?repos=caoyunzhou/cocopilot-gpt&type=Date)](https://star-history.com/#caoyunzhou/cocopilot-gpt&Date)
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..f284002
--- /dev/null
+++ b/main.py
@@ -0,0 +1,101 @@
+import requests
+from flask import Flask, request, Response, jsonify
+import uuid
+import datetime
+import hashlib
+
+app = Flask(__name__)
+
+machine_id = hashlib.sha256(str(uuid.uuid4()).encode()).hexdigest()
+
+
+def forward_request(GHO_TOKEN: str, stream: bool, json_data):
+
+    headers = {
+        'Host': 'api.github.com',
+        'authorization': f'token {GHO_TOKEN}',
+        "Editor-Version": "vscode/1.84.2",
+        "Editor-Plugin-Version": "copilot/1.138.0",
+        "User-Agent": "GithubCopilot/1.138.0",
+        "Accept": "*/*",
+        "Accept-Encoding": "gzip, deflate, br",
+        "Connection": "close"
+    }
+
+    # Exchange the GitHub token (gho_xxx / ghu_xxx) for a short-lived Copilot access token
+    response = requests.get(
+        'https://api.github.com/copilot_internal/v2/token', headers=headers)
+    print("Auth:", response.text)
+    if response.status_code == 200 and response.json():
+        access_token = response.json()['token']
+
+        acc_headers = {
+            'Authorization': f'Bearer {access_token}',
+            'X-Request-Id': str(uuid.uuid4()),
+            'Vscode-Sessionid': str(uuid.uuid4()) + str(int(datetime.datetime.utcnow().timestamp() * 1000)),
+            'vscode-machineid': machine_id,
+            'Editor-Version': 'vscode/1.84.2',
+            'Editor-Plugin-Version': 'copilot-chat/0.10.2',
+            'Openai-Organization': 'github-copilot',
+            'Openai-Intent': 'conversation-panel',
+            'Content-Type': 'application/json',
+            'User-Agent': 'GitHubCopilotChat/0.10.2',
+            'Accept': '*/*',
+            'Accept-Encoding': 'gzip, deflate, br',
+        }
+
+        resp = requests.post('https://api.githubcopilot.com/chat/completions', headers=acc_headers, json=json_data, stream=stream)
+        return resp.iter_content(chunk_size=8192) if stream else resp.json()
+    else:
+        # print(response.text)
+        # Token exchange failed: surface the upstream error body and status code
+        return response.text, response.status_code
+
+
+@app.route('/v1/chat/completions', methods=['POST'])
+def proxy():
+    # Get the JSON body from the request
+    json_data = request.get_json()
+    if json_data is None:
+        return "Request body is missing or not in JSON format", 400
+    # Get the Authorization header
+    GHO_TOKEN = request.headers.get('Authorization')
+    if GHO_TOKEN is None:
+        return "Authorization header is missing", 401
+    GHO_TOKEN = GHO_TOKEN.split(' ')[1]
+    print("Secret:", GHO_TOKEN)
+    print("Message:", json_data)
+
+    # Check if the stream option is set in the request data
+    stream = json_data.get('stream', False)
+
+    # Forward the request and return the response, streaming if requested
+    resp = forward_request(GHO_TOKEN, stream, json_data)
+    return Response(resp, mimetype='application/json') if stream else resp
+
+
+@app.route('/v1/models', methods=['GET'])
+def models():
+    data = {
+        "object": "list",
+        "data": [
+            {"id": "gpt-4-0314", "object": "model", "created": 1687882410,
+             "owned_by": "openai", "root": "gpt-4-0314", "parent": None},
+            {"id": "gpt-4-0613", "object": "model", "created": 1686588896,
+             "owned_by": "openai", "root": "gpt-4-0613", "parent": None},
+            {"id": "gpt-4", "object": "model", "created": 1687882411,
+             "owned_by": "openai", "root": "gpt-4", "parent": None},
+            {"id": "gpt-3.5-turbo", "object": "model", "created": 1677610602,
+             "owned_by": "openai", "root": "gpt-3.5-turbo", "parent": None},
+            {"id": "gpt-3.5-turbo-0301", "object": "model", "created": 1677649963,
+             "owned_by": "openai", "root": "gpt-3.5-turbo-0301", "parent": None},
+        ]
+    }
+    return jsonify(data)
+
+
+if __name__ == '__main__':
+    app.run(host='0.0.0.0', port=8080, debug=False)
+
+
+# GHO_TOKEN = "gho_xx"
+# set_access_token(get_token(GHO_TOKEN)['token'])
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..bef81fe
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+requests==2.25.1
+Flask==2.0.2
+Jinja2==3.1.2