name: Comment PRs
on:
  pull_request_target:
jobs:
  comment-pr:
    env:
      MODEL_NAME: hermes-2-theta-llama-3-8b
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          ref: "${{ github.event.pull_request.merge_commit_sha }}"
          fetch-depth: 0 # needed to checkout all branches for this Action to work
      # Start a LocalAI instance on the runner and preload the model
      - uses: mudler/localai-github-action@v1
        with:
          model: 'hermes-2-theta-llama-3-8b' # Any model from models.localai.io, or from huggingface.com with: "huggingface://<repository>/file"
      # Check the PR diff using the current branch and the base branch of the PR
      - uses: GrantBirki/git-diff-action@v2.7.0
        id: git-diff-action
        with:
          json_diff_file_output: diff.json
          raw_diff_file_output: diff.txt
          file_output_only: "true"
          base_branch: ${{ github.event.pull_request.base.sha }}
      - name: Show diff
        env:
          DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
        run: |
          cat "$DIFF"
      - name: Summarize
        env:
          DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
        id: summarize
        run: |
          input="$(cat "$DIFF")"

          # Define the LocalAI API endpoint
          API_URL="http://localhost:8080/chat/completions"

          # Create a JSON payload using jq to handle special characters
          json_payload=$(jq -n --arg model "$MODEL_NAME" --arg input "$input" '{
            model: $model,
            messages: [
              {
                role: "system",
                content: "You are LocalAI-bot in GitHub that helps understand PRs and assess complexity. Explain what has changed in this PR diff and why"
              },
              {
                role: "user",
                content: $input
              }
            ]
          }')

          # Send the request to LocalAI
          # (add -H "Authorization: Bearer $API_KEY" if your instance requires a key)
          response=$(curl -s -X POST "$API_URL" \
            -H "Content-Type: application/json" \
            -d "$json_payload")

          # Extract the summary from the response
          summary="$(echo "$response" | jq -r '.choices[0].message.content')"

          # Print the summary and the payload that was sent
          echo "Summary:"
          echo "$summary"
          echo "payload sent"
          echo "$json_payload"

          # Expose the summary as a multiline step output, using the
          # heredoc syntax that $GITHUB_OUTPUT requires
          {
            echo 'message<<EOF'
            echo "$summary"
            echo EOF
          } >> "$GITHUB_OUTPUT"

          # Show the tail of the LocalAI container logs for debugging
          docker logs --tail 10 local-ai
      - uses: mshick/add-pr-comment@v2
        if: always()
        with:
          repo-token: ${{ secrets.UPDATE_BOT_TOKEN }}
          message: ${{ steps.summarize.outputs.message }}
          message-failure: |
            Uh oh! Could not analyze this PR, maybe it's too big?
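
# A minimal local smoke test (a sketch, not part of the workflow above): it sends
# the same kind of chat-completions request the Summarize step builds, assuming a
# LocalAI instance is already listening on localhost:8080 with the model loaded:
#
#   curl -s -X POST http://localhost:8080/chat/completions \
#     -H "Content-Type: application/json" \
#     -d '{"model": "hermes-2-theta-llama-3-8b", "messages": [{"role": "user", "content": "Say hello"}]}' \
#     | jq -r '.choices[0].message.content'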