# GitLab API

















*Version: 1.0.46*
Pythonic GitLab API Library
Includes a large portion of useful API calls to GitLab and SQLAlchemy Models to handle loading API calls directly to a database!
This repository is actively maintained - Contributions are welcome!
Additional Features:
- All responses are returned as native Pydantic models
- Save Pydantic models to pickle files locally
- Easily convert Pydantic to SQLAlchemy models for quick database insertion
### API Calls:
- Branches
- Commits
- Deploy Tokens
- Groups
- Jobs
- Members
- Merge Request
- Merge Request Rules
- Namespaces
- Packages
- Pipeline
- Projects
- Protected Branches
- Releases
- Runners
- Users
- Wiki
- Custom Endpoint
If your API call isn't supported, you can always run the standard custom API endpoint function to get/post/put/delete an endpoint
<details>
<summary><b>Usage:</b></summary>
Using the API directly
```python
#!/usr/bin/python
import gitlab_api
from gitlab_api import pydantic_to_sqlalchemy, upsert, save_model, load_model
from gitlab_api.gitlab_db_models import (
BaseDBModel as Base,
)
import urllib3
import os
from urllib.parse import quote_plus
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
gitlab_token = os.environ["GITLAB_TOKEN"]
postgres_username = os.environ["POSTGRES_USERNAME"]
postgres_password = os.environ["POSTGRES_PASSWORD"]
postgres_db_host = os.environ["POSTGRES_DB_HOST"]
postgres_port = os.environ["POSTGRES_PORT"]
postgres_db_name = os.environ["POSTGRES_DB_NAME"]
if __name__ == "__main__":
print("Creating GitLab Client...")
client = gitlab_api.Api(
url="http://gitlab.arpa/api/v4/",
token=gitlab_token,
verify=False,
)
print("GitLab Client Created\n\n")
print("\nFetching User Data...")
user_response = client.get_users(active=True, humans=True)
print(
f"Users ({len(user_response.data)}) Fetched - "
f"Status: {user_response.status_code}\n"
)
print("\nFetching Namespace Data...")
namespace_response = client.get_namespaces()
print(
f"Namespaces ({len(namespace_response.data)}) Fetched - "
f"Status: {namespace_response.status_code}\n"
)
print("\nFetching Project Data...")
project_response = client.get_nested_projects_by_group(group_id=2, per_page=100)
print(
f"Projects ({len(project_response.data)}) Fetched - "
f"Status: {project_response.status_code}\n"
)
print("\nFetching Merge Request Data...")
merge_request_response = client.get_group_merge_requests(
argument="state=all", group_id=2
)
print(
f"\nMerge Requests ({len(merge_request_response.data)}) Fetched - "
f"Status: {merge_request_response.status_code}\n"
)
# Pipeline Jobs table
pipeline_job_response = None
for project in project_response.data:
job_response = client.get_project_jobs(project_id=project.id)
if (
not pipeline_job_response
and hasattr(job_response, "data")
and len(job_response.data) > 0
):
pipeline_job_response = job_response
elif (
pipeline_job_response
and hasattr(job_response, "data")
and len(job_response.data) > 0
):
pipeline_job_response.data.extend(job_response.data)
print(
f"Pipeline Jobs ({len(getattr(pipeline_job_response, 'data', []))}) "
f"Fetched for Project ({project.id}) - "
f"Status: {pipeline_job_response.status_code}\n"
)
print("Saving Pydantic Models...")
user_file = save_model(model=user_response, file_name="user_model", file_path=".")
namespace_file = save_model(
model=namespace_response, file_name="namespace_model", file_path="."
)
project_file = save_model(
model=project_response, file_name="project_model", file_path="."
)
merge_request_file = save_model(
model=merge_request_response, file_name="merge_request_model", file_path="."
)
pipeline_job_file = save_model(
model=pipeline_job_response, file_name="pipeline_job_model", file_path="."
)
print("Models Saved")
print("Loading Pydantic Models...")
user_response = load_model(file=user_file)
namespace_response = load_model(file=namespace_file)
project_response = load_model(file=project_file)
merge_request_response = load_model(file=merge_request_file)
pipeline_job_response = load_model(file=pipeline_job_file)
print("Models Loaded")
print("Converting Pydantic to SQLAlchemy model...")
user_db_model = pydantic_to_sqlalchemy(schema=user_response)
print(f"Database Models: {user_db_model}\n")
print("Converting Pydantic to SQLAlchemy model...")
namespace_db_model = pydantic_to_sqlalchemy(schema=namespace_response)
print(f"Database Models: {namespace_db_model}\n")
print("Converting Pydantic to SQLAlchemy model...")
project_db_model = pydantic_to_sqlalchemy(schema=project_response)
print(f"Database Models: {project_db_model}\n")
print("Converting Pydantic to SQLAlchemy model...")
merge_request_db_model = pydantic_to_sqlalchemy(schema=merge_request_response)
print(f"Database Models: {merge_request_db_model}\n")
print("Converting Pydantic to SQLAlchemy model...")
pipeline_db_model = pydantic_to_sqlalchemy(schema=pipeline_job_response)
print(f"Database Models: {pipeline_db_model}\n")
print("Creating Engine")
engine = create_engine(
f"postgresql://{postgres_username}:{quote_plus(postgres_password)}@"
f"{postgres_db_host}:{postgres_port}/{postgres_db_name}"
)
print("Engine Created\n\n")
print("Creating Tables...")
Base.metadata.create_all(engine)
print("Tables Created\n\n")
print("Creating Session...")
Session = sessionmaker(bind=engine)
session = Session()
print("Session Created\n\n")
print(f"Inserting ({len(user_response.data)}) Users Into Database...")
upsert(session=session, model=user_db_model)
print("Users Synchronization Complete!\n")
print(f"Inserting ({len(namespace_response.data)}) Namespaces Into Database...")
upsert(session=session, model=namespace_db_model)
print("Namespaces Synchronization Complete!\n")
print(f"Inserting ({len(project_response.data)}) Projects Into Database...\n")
upsert(session=session, model=project_db_model)
print("Projects Synchronization Complete!\n")
print(
f"Inserting ({len(merge_request_response.data)}) Merge Requests Into Database..."
)
upsert(session=session, model=merge_request_db_model)
print("Merge Request Synchronization Complete!\n")
print(
f"Inserting ({len(pipeline_job_response.data)}) Pipeline Jobs Into Database..."
)
upsert(session=session, model=pipeline_db_model)
print("Pipeline Jobs Synchronization Complete!\n")
session.close()
print("Session Closed")
```
</details>
<details>
<summary><b>Installation Instructions:</b></summary>
Install Python Package
```bash
python -m pip install gitlab-api
```
</details>
<details>
<summary><b>Tests:</b></summary>
pre-commit check
```bash
pre-commit run --all-files
```
pytest
```bash
python -m pip install -r test-requirements.txt
pytest ./test/test_gitlab_models.py
```
Full pytests
```bash
rm -rf ./dist/* \
&& python setup.py bdist_wheel --universal \
&& python -m pip uninstall gitlab-api -y \
&& python -m pip install ./dist/*.whl \
&& pytest -vv ./test/test_gitlab_models.py \
&& pytest -vv ./test/test_gitlab_db_models.py \
&& python ./test/test_sqlalchemy.py
```
</details>
<details>
<summary><b>Repository Owners:</b></summary>
<img width="100%" height="180em" src="https://github-readme-stats.vercel.app/api?username=Knucklessg1&show_icons=true&hide_border=true&&count_private=true&include_all_commits=true" />


</details>
Raw data
{
"_id": null,
"home_page": "https://github.com/Knuckles-Team/gitlab-api",
"name": "gitlab-api",
"maintainer": null,
"docs_url": null,
"requires_python": null,
"maintainer_email": null,
"keywords": null,
"author": "Audel Rouhi",
"author_email": "knucklessg1@gmail.com",
"download_url": null,
"platform": null,
"description": "# GitLab API\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n*Version: 1.0.46*\n\nPythonic GitLab API Library\n\nIncludes a large portion of useful API calls to GitLab and SQLAlchemy Models to handle loading API calls directly to a database!\n\nThis repository is actively maintained - Contributions are welcome!\n\nAdditional Features:\n- All responses are returned as native Pydantic models\n- Save Pydantic models to pickle files locally\n- Easily convert Pydantic to SQLAlchemy models for quick database insertion\n\n\n### API Calls:\n- Branches\n- Commits\n- Deploy Tokens\n- Groups\n- Jobs\n- Members\n- Merge Request\n- Merge Request Rules\n- Namespaces\n- Packages\n- Pipeline\n- Projects\n- Protected Branches\n- Releases\n- Runners\n- Users\n- Wiki\n- Custom Endpoint\n\nIf your API call isn't supported, you can always run the standard custom API endpoint function to get/post/put/delete and endpoint\n\n\n<details>\n <summary><b>Usage:</b></summary>\n\nUsing the API directly\n\n```python\n#!/usr/bin/python\n\nimport gitlab_api\nfrom gitlab_api import pydantic_to_sqlalchemy, upsert, save_model, load_model\nfrom gitlab_api.gitlab_db_models import (\n BaseDBModel as Base,\n)\nimport urllib3\nimport os\nfrom urllib.parse import quote_plus\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.orm import sessionmaker\n\nurllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n\ngitlab_token = os.environ[\"GITLAB_TOKEN\"]\npostgres_username = os.environ[\"POSTGRES_USERNAME\"]\npostgres_password = os.environ[\"POSTGRES_PASSWORD\"]\npostgres_db_host = os.environ[\"POSTGRES_DB_HOST\"]\npostgres_port = os.environ[\"POSTGRES_PORT\"]\npostgres_db_name = os.environ[\"POSTGRES_DB_NAME\"]\n\n\nif __name__ == \"__main__\":\n print(\"Creating GitLab Client...\")\n client = gitlab_api.Api(\n url=\"http://gitlab.arpa/api/v4/\",\n token=gitlab_token,\n verify=False,\n )\n print(\"GitLab Client Created\\n\\n\")\n\n print(\"\\nFetching User Data...\")\n 
user_response = client.get_users(active=True, humans=True)\n print(\n f\"Users ({len(user_response.data)}) Fetched - \"\n f\"Status: {user_response.status_code}\\n\"\n )\n\n print(\"\\nFetching Namespace Data...\")\n namespace_response = client.get_namespaces()\n print(\n f\"Namespaces ({len(namespace_response.data)}) Fetched - \"\n f\"Status: {namespace_response.status_code}\\n\"\n )\n\n print(\"\\nFetching Project Data...\")\n project_response = client.get_nested_projects_by_group(group_id=2, per_page=100)\n print(\n f\"Projects ({len(project_response.data)}) Fetched - \"\n f\"Status: {project_response.status_code}\\n\"\n )\n\n print(\"\\nFetching Merge Request Data...\")\n merge_request_response = client.get_group_merge_requests(\n argument=\"state=all\", group_id=2\n )\n\n print(\n f\"\\nMerge Requests ({len(merge_request_response.data)}) Fetched - \"\n f\"Status: {merge_request_response.status_code}\\n\"\n )\n\n # Pipeline Jobs table\n pipeline_job_response = None\n for project in project_response.data:\n job_response = client.get_project_jobs(project_id=project.id)\n if (\n not pipeline_job_response\n and hasattr(job_response, \"data\")\n and len(job_response.data) > 0\n ):\n pipeline_job_response = job_response\n elif (\n pipeline_job_response\n and hasattr(job_response, \"data\")\n and len(job_response.data) > 0\n ):\n pipeline_job_response.data.extend(job_response.data)\n print(\n f\"Pipeline Jobs ({len(getattr(pipeline_job_response, 'data', []))}) \"\n f\"Fetched for Project ({project.id}) - \"\n f\"Status: {pipeline_job_response.status_code}\\n\"\n )\n\n print(\"Saving Pydantic Models...\")\n user_file = save_model(model=user_response, file_name=\"user_model\", file_path=\".\")\n namespace_file = save_model(\n model=namespace_response, file_name=\"namespace_model\", file_path=\".\"\n )\n project_file = save_model(\n model=project_response, file_name=\"project_model\", file_path=\".\"\n )\n merge_request_file = save_model(\n model=merge_request_response, 
file_name=\"merge_request_model\", file_path=\".\"\n )\n pipeline_job_file = save_model(\n model=pipeline_job_response, file_name=\"pipeline_job_model\", file_path=\".\"\n )\n print(\"Models Saved\")\n\n print(\"Loading Pydantic Models...\")\n user_response = load_model(file=user_file)\n namespace_response = load_model(file=namespace_file)\n project_response = load_model(file=project_file)\n merge_request_response = load_model(file=merge_request_file)\n pipeline_job_response = load_model(file=pipeline_job_file)\n print(\"Models Loaded\")\n\n print(\"Converting Pydantic to SQLAlchemy model...\")\n user_db_model = pydantic_to_sqlalchemy(schema=user_response)\n print(f\"Database Models: {user_db_model}\\n\")\n\n print(\"Converting Pydantic to SQLAlchemy model...\")\n namespace_db_model = pydantic_to_sqlalchemy(schema=namespace_response)\n print(f\"Database Models: {namespace_db_model}\\n\")\n\n print(\"Converting Pydantic to SQLAlchemy model...\")\n project_db_model = pydantic_to_sqlalchemy(schema=project_response)\n print(f\"Database Models: {project_db_model}\\n\")\n\n print(\"Converting Pydantic to SQLAlchemy model...\")\n merge_request_db_model = pydantic_to_sqlalchemy(schema=merge_request_response)\n print(f\"Database Models: {merge_request_db_model}\\n\")\n\n print(\"Converting Pydantic to SQLAlchemy model...\")\n pipeline_db_model = pydantic_to_sqlalchemy(schema=pipeline_job_response)\n print(f\"Database Models: {pipeline_db_model}\\n\")\n\n print(\"Creating Engine\")\n engine = create_engine(\n f\"postgresql://{postgres_username}:{quote_plus(postgres_password)}@\"\n f\"{postgres_db_host}:{postgres_port}/{postgres_db_name}\"\n )\n print(\"Engine Created\\n\\n\")\n\n print(\"Creating Tables...\")\n Base.metadata.create_all(engine)\n print(\"Tables Created\\n\\n\")\n\n print(\"Creating Session...\")\n Session = sessionmaker(bind=engine)\n session = Session()\n print(\"Session Created\\n\\n\")\n\n print(f\"Inserting ({len(user_response.data)}) Users Into 
Database...\")\n upsert(session=session, model=user_db_model)\n print(\"Users Synchronization Complete!\\n\")\n\n print(f\"Inserting ({len(namespace_response.data)}) Namespaces Into Database...\")\n upsert(session=session, model=namespace_db_model)\n print(\"Namespaces Synchronization Complete!\\n\")\n\n print(f\"Inserting ({len(project_response.data)}) Projects Into Database...\\n\")\n upsert(session=session, model=project_db_model)\n print(\"Projects Synchronization Complete!\\n\")\n\n print(\n f\"Inserting ({len(merge_request_response.data)}) Merge Requests Into Database...\"\n )\n upsert(session=session, model=merge_request_db_model)\n print(\"Merge Request Synchronization Complete!\\n\")\n\n print(\n f\"Inserting ({len(pipeline_job_response.data)}) Pipeline Jobs Into Database...\"\n )\n upsert(session=session, model=pipeline_db_model)\n print(\"Pipeline Jobs Synchronization Complete!\\n\")\n\n session.close()\n print(\"Session Closed\")\n\n```\n\n</details>\n\n<details>\n <summary><b>Installation Instructions:</b></summary>\n\nInstall Python Package\n\n```bash\npython -m pip install gitlab-api\n```\n\n</details>\n\n<details>\n <summary><b>Tests:</b></summary>\n\npre-commit check\n```bash\npre-commit run --all-files\n```\n\npytest\n```bash\npython -m pip install -r test-requirements.txt\npytest ./test/test_gitlab_models.py\n```\n\nFull pytests\n\n```bash\nrm -rf ./dist/* \\\n&& python setup.py bdist_wheel --universal \\\n&& python -m pip uninstall gitlab-api -y \\\n&& python -m pip install ./dist/*.whl \\\n&& pytest -vv ./test/test_gitlab_models.py \\\n&& pytest -vv ./test/test_gitlab_db_models.py \\\n&& python ./test/test_sqlalchemy.py\n```\n</details>\n\n\n<details>\n <summary><b>Repository Owners:</b></summary>\n\n\n<img width=\"100%\" height=\"180em\" src=\"https://github-readme-stats.vercel.app/api?username=Knucklessg1&show_icons=true&hide_border=true&&count_private=true&include_all_commits=true\" />\n\n\n\n</details>\n",
"bugtrack_url": null,
"license": "MIT",
"summary": "GitLab API Python Wrapper",
"version": "1.0.46",
"project_urls": {
"Homepage": "https://github.com/Knuckles-Team/gitlab-api"
},
"split_keywords": [],
"urls": [
{
"comment_text": "",
"digests": {
"blake2b_256": "39a312bb28fafbe860f87a100f18f85cb88c807db3b5b2ea9efa5e7a50221548",
"md5": "8bd77180845a5026149d4eb02c329d53",
"sha256": "cd3685285bba582dded4eef80ef025ee3c782222164a3109a844c47f2241001b"
},
"downloads": -1,
"filename": "gitlab_api-1.0.46-py2.py3-none-any.whl",
"has_sig": false,
"md5_digest": "8bd77180845a5026149d4eb02c329d53",
"packagetype": "bdist_wheel",
"python_version": "py2.py3",
"requires_python": null,
"size": 61678,
"upload_time": "2025-02-13T00:27:47",
"upload_time_iso_8601": "2025-02-13T00:27:47.301831Z",
"url": "https://files.pythonhosted.org/packages/39/a3/12bb28fafbe860f87a100f18f85cb88c807db3b5b2ea9efa5e7a50221548/gitlab_api-1.0.46-py2.py3-none-any.whl",
"yanked": false,
"yanked_reason": null
}
],
"upload_time": "2025-02-13 00:27:47",
"github": true,
"gitlab": false,
"bitbucket": false,
"codeberg": false,
"github_user": "Knuckles-Team",
"github_project": "gitlab-api",
"travis_ci": false,
"coveralls": false,
"github_actions": true,
"requirements": [
{
"name": "requests",
"specs": [
[
">=",
"2.8.1"
]
]
},
{
"name": "urllib3",
"specs": [
[
">=",
"2.2.2"
]
]
},
{
"name": "pydantic",
"specs": [
[
">=",
"2.8.2"
]
]
},
{
"name": "SQLAlchemy",
"specs": [
[
">=",
"2.0.36"
]
]
},
{
"name": "alembic",
"specs": [
[
">=",
"1.14.1"
]
]
}
],
"lcname": "gitlab-api"
}