From 5c5015b2671ae3221cbd9833aecb757ba93208ba Mon Sep 17 00:00:00 2001 From: Gadi Zimerman Date: Tue, 18 Jul 2023 14:45:15 +0300 Subject: [PATCH 1/4] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0728bbce..9165af8f 100644 --- a/README.md +++ b/README.md @@ -132,7 +132,7 @@ Here are several ways to install and run PR-Agent: ## How it works -![PR-Agent Tools](https://www.codium.ai/wp-content/uploads/2023/07/pr-agent-schema-updated.png) +![PR-Agent Tools](https://www.codium.ai/wp-content/uploads/2023/07/codiumai-diagram-v4.jpg) Check out the [PR Compression strategy](./PR_COMPRESSION.md) page for more details on how we convert a code diff to a manageable LLM prompt From 77f243b7abcb4287b4fb535da35ae47f1618369d Mon Sep 17 00:00:00 2001 From: zmeir Date: Tue, 18 Jul 2023 16:22:41 +0300 Subject: [PATCH 2/4] Allow passing CLI args (helps with debugging) --- pr_agent/cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pr_agent/cli.py b/pr_agent/cli.py index ca9d5db0..d6af7cf6 100644 --- a/pr_agent/cli.py +++ b/pr_agent/cli.py @@ -10,7 +10,7 @@ from pr_agent.tools.pr_questions import PRQuestions from pr_agent.tools.pr_reviewer import PRReviewer -def run(): +def run(args=None): parser = argparse.ArgumentParser(description='AI based pull request analyzer', usage="""\ Usage: cli.py --pr-url <URL on supported git hosting service> <command> [<args>]. For example: @@ -35,7 +35,7 @@ reflect - Ask the PR author questions about the PR. 
'reflect', 'review_after_reflect'], default='review') parser.add_argument('rest', nargs=argparse.REMAINDER, default=[]) - args = parser.parse_args() + args = parser.parse_args(args) logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO")) command = args.command.lower() if command in ['ask', 'ask_question']: From dc67e6a66e1f8915504881cd073f6890341e106d Mon Sep 17 00:00:00 2001 From: Yuval Goldberg Date: Tue, 18 Jul 2023 17:46:42 +0300 Subject: [PATCH 3/4] Support deploying pr-agent on AWS Lambda --- INSTALL.md | 29 +++++++++++++++++++++++------ docker/Dockerfile.lambda | 12 ++++++++++++ pr_agent/servers/serverless.py | 18 ++++++++++++++++++ 3 files changed, 53 insertions(+), 6 deletions(-) create mode 100644 docker/Dockerfile.lambda create mode 100644 pr_agent/servers/serverless.py diff --git a/INSTALL.md b/INSTALL.md index 40d4cb58..04ccdc65 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -149,16 +149,15 @@ git clone https://github.com/Codium-ai/pr-agent.git ``` 5. Copy the secrets template file and fill in the following: + ``` + cp pr_agent/settings/.secrets_template.toml pr_agent/settings/.secrets.toml + # Edit .secrets.toml file + ``` - Your OpenAI key. - - Set deployment_type to 'app' - Copy your app's private key to the private_key field. - Copy your app's ID to the app_id field. - Copy your app's webhook secret to the webhook_secret field. - -``` -cp pr_agent/settings/.secrets_template.toml pr_agent/settings/.secrets.toml -# Edit .secrets.toml file -``` + - Set deployment_type to 'app' in [configuration.toml](./pr_agent/settings/configuration.toml) 6. Build a Docker image for the app and optionally push it to a Docker repository. We'll use Dockerhub as an example: @@ -169,6 +168,7 @@ docker push codiumai/pr-agent:github_app # Push to your Docker repository 7. Host the app using a server, serverless function, or container environment. Alternatively, for development and debugging, you may use tools like smee.io to forward webhooks to your local machine. 
+ You can check [Deploy as a Lambda Function](#deploy-as-a-lambda-function) 8. Go back to your app's settings, and set the following: @@ -178,3 +178,20 @@ docker push codiumai/pr-agent:github_app # Push to your Docker repository 9. Install the app by navigating to the "Install App" tab and selecting your desired repositories. --- + +#### Deploy as a Lambda Function + +1. Follow steps 1-5 of [Method 5](#method-5-run-as-a-github-app). +2. Build a docker image that can be used as a lambda function + ```shell + docker buildx build --platform=linux/amd64 . -t codiumai/pr-agent:serverless -f docker/Dockerfile.lambda + ``` +3. Push image to ECR + ```shell + docker tag codiumai/pr-agent:serverless <AWS_ACCOUNT>.dkr.ecr.<AWS_REGION>.amazonaws.com/codiumai/pr-agent:serverless + docker push <AWS_ACCOUNT>.dkr.ecr.<AWS_REGION>.amazonaws.com/codiumai/pr-agent:serverless + ``` +4. Create a lambda function that uses the uploaded image. Set the lambda timeout to be at least 3m. +5. Configure the lambda function to have a Function URL. +6. Go back to steps 8-9 of [Method 5](#method-5-run-as-a-github-app) with the function url as your Webhook URL. + The Webhook URL would look like `https://<LAMBDA_FUNCTION_URL>/api/v1/github_webhooks` diff --git a/docker/Dockerfile.lambda b/docker/Dockerfile.lambda new file mode 100644 index 00000000..6c0d677d --- /dev/null +++ b/docker/Dockerfile.lambda @@ -0,0 +1,12 @@ +FROM public.ecr.aws/lambda/python:3.10 + +RUN yum update -y && \ + yum install -y gcc python3-devel && \ + yum clean all + +ADD requirements.txt . 
+RUN pip install -r requirements.txt && rm requirements.txt +RUN pip install mangum +COPY pr_agent/ ${LAMBDA_TASK_ROOT}/pr_agent/ + +CMD ["pr_agent.servers.serverless.serverless"] diff --git a/pr_agent/servers/serverless.py b/pr_agent/servers/serverless.py new file mode 100644 index 00000000..42178431 --- /dev/null +++ b/pr_agent/servers/serverless.py @@ -0,0 +1,18 @@ +import logging + +from fastapi import FastAPI +from mangum import Mangum + +from pr_agent.servers.github_app import router + +logger = logging.getLogger() +logger.setLevel(logging.DEBUG) + +app = FastAPI() +app.include_router(router) + +handler = Mangum(app, lifespan="off") + + +def serverless(event, context): + return handler(event, context) From 370520df514641a75465e97e990450aabeab5caa Mon Sep 17 00:00:00 2001 From: Yuval Goldberg Date: Wed, 19 Jul 2023 11:05:24 +0300 Subject: [PATCH 4/4] Update docker/Dockerfile.lambda have a fixed mangum version Co-authored-by: Ori Kotek --- docker/Dockerfile.lambda | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile.lambda b/docker/Dockerfile.lambda index 6c0d677d..404c8529 100644 --- a/docker/Dockerfile.lambda +++ b/docker/Dockerfile.lambda @@ -6,7 +6,7 @@ RUN yum update -y && \ ADD requirements.txt . RUN pip install -r requirements.txt && rm requirements.txt -RUN pip install mangum +RUN pip install mangum==0.17.0 COPY pr_agent/ ${LAMBDA_TASK_ROOT}/pr_agent/ CMD ["pr_agent.servers.serverless.serverless"]