Mirror of https://github.com/qodo-ai/pr-agent.git
Merge branch 'main' into pr/2046

# Conflicts:
#	requirements.txt

Commit 40633d7a9e: 4 changed files with 19 additions and 11 deletions
@@ -39,6 +39,13 @@ Generate the token and add it to .secret.toml under `bitbucket_server` section
 bearer_token = "<your key>"
 ```
 
+Don't forget to also set the URL of your Bitbucket Server instance (either in `.secret.toml` or in `configuration.toml`):
+
+```toml
+[bitbucket_server]
+url = "<full URL to your Bitbucket instance, e.g.: https://git.bitbucket.com>"
+```
+
 ### Run it as CLI
 
 Modify `configuration.toml`:
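The same two `[bitbucket_server]` values can also be supplied at runtime instead of through the TOML files. A minimal sketch, assuming pr-agent's dynaconf-based `get_settings()` loader from `pr_agent.config_loader`; the token and URL values below are placeholders:

```python
from pr_agent.config_loader import get_settings

# Placeholder values, equivalent to the .secret.toml / configuration.toml entries above.
get_settings().set("bitbucket_server.bearer_token", "<your key>")
get_settings().set("bitbucket_server.url", "https://git.bitbucket.com")  # example URL only
```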
@@ -47,6 +54,8 @@ Modify `configuration.toml`:
 git_provider="bitbucket_server"
 ```
 
 and pass the Pull request URL:
 
 ```shell
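Besides the shell invocation above, the PR URL and command can be passed from a short Python script, mirroring the script-based usage documented for other providers. A sketch under that assumption; `cli.run_command`, the placeholder URL, and the `/review` command choice are illustrative, not part of this diff:

```python
from pr_agent import cli
from pr_agent.config_loader import get_settings


def main():
    pr_url = "<URL of a pull request on your Bitbucket Server instance>"  # placeholder
    command = "/review"  # any supported command, e.g. /describe, /improve

    # Same setting as the configuration.toml change in this diff
    get_settings().set("config.git_provider", "bitbucket_server")

    # Feedback is posted back to the pull request
    cli.run_command(pr_url, command)


if __name__ == "__main__":
    main()
```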
@@ -329,12 +329,13 @@ async def retry_with_fallback_models(f: Callable, model_type: ModelType = ModelT
             )
             get_settings().set("openai.deployment_id", deployment_id)
             return await f(model)
-        except:
+        except Exception as e:
             get_logger().warning(
-                f"Failed to generate prediction with {model}"
+                f"Failed to generate prediction with {model}",
+                artifact={"error": e},
             )
             if i == len(all_models) - 1:  # If it's the last iteration
-                raise Exception(f"Failed to generate prediction with any model of {all_models}")
+                raise Exception(f"Failed to generate prediction with any model of {all_models}") from e
 
 
 def _get_all_models(model_type: ModelType = ModelType.REGULAR) -> List[str]:
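The hunk above replaces a bare `except:` with `except Exception as e:`, attaches the caught error to the warning as a log artifact, and chains the final exception with `from e` so the root cause stays in the traceback. A self-contained sketch of that fallback pattern, with made-up model names rather than the repository's code:

```python
import asyncio
from typing import Callable, List


async def retry_with_fallbacks(f: Callable, models: List[str]) -> str:
    # Try each model in order; keep the last error and chain it so the
    # original cause stays visible in the final traceback.
    for i, model in enumerate(models):
        try:
            return await f(model)
        except Exception as e:  # named, not a bare except
            print(f"Failed to generate prediction with {model}: {e}")
            if i == len(models) - 1:  # last fallback exhausted
                raise Exception(f"Failed to generate prediction with any model of {models}") from e


async def fake_completion(model: str) -> str:
    # Simulated model call: only the final fallback succeeds.
    if model != "model-c":
        raise RuntimeError(f"{model} is unavailable")
    return f"answer from {model}"


if __name__ == "__main__":
    print(asyncio.run(retry_with_fallbacks(fake_completion, ["model-a", "model-b", "model-c"])))
```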
@@ -376,7 +376,6 @@ def get_main_pr_language(languages, files) -> str:
                     break
     except Exception as e:
         get_logger().exception(f"Failed to get main language: {e}")
-        pass
 
     ## old approach:
     # most_common_extension = max(set(extension_list), key=extension_list.count)
@@ -401,7 +400,6 @@ def get_main_pr_language(languages, files) -> str:
 
     except Exception as e:
         get_logger().exception(e)
-        pass
 
     return main_language_str
 
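Both `get_main_pr_language` hunks simply drop a `pass` that was redundant after the logging call in the same `except` block. The `## old approach` comment kept as context refers to picking the most frequent file extension; a standalone illustration of that expression, with a made-up extension list:

```python
# Hypothetical extension list; the real one is built from the PR's changed files.
extension_list = [".py", ".py", ".md", ".toml", ".py"]
most_common_extension = max(set(extension_list), key=extension_list.count)
print(most_common_extension)  # .py
```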
@@ -1,19 +1,19 @@
-aiohttp==3.10.2
+aiohttp==3.12.15
 anthropic>=0.69.0
 #anthropic[vertex]==0.47.1
 atlassian-python-api==3.41.4
 azure-devops==7.1.0b3
-azure-identity==1.15.0
-boto3==1.33.6
+azure-identity==1.25.0
+boto3==1.40.45
 certifi==2024.8.30
 dynaconf==3.2.4
-fastapi==0.115.6
+fastapi==0.118.0
 GitPython==3.1.41
 google-cloud-aiplatform==1.38.0
 google-generativeai==0.8.3
 google-cloud-storage==2.10.0
-Jinja2==3.1.2
-litellm==1.73.6
+Jinja2==3.1.6
+litellm==1.77.7
 loguru==0.7.2
 msrest==0.7.1
 openai>=1.55.3