Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for Python 3.13 #804

Merged
merged 9 commits into from
Feb 3, 2025
38 changes: 38 additions & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/python
{
"name": "Python 3",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
// "image": "mcr.microsoft.com/devcontainers/python:3.9-bookworm",
// "image": "mcr.microsoft.com/devcontainers/python:3.10-bookworm",
// "image": "mcr.microsoft.com/devcontainers/python:3.11-bookworm",
// "image": "mcr.microsoft.com/devcontainers/python:3.12-bookworm",
// "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
// "image": "mcr.microsoft.com/devcontainers/python:3.13-bookworm",
// Active image: Python 3.13 on Debian bullseye; the commented lines above are
// kept so a contributor can switch interpreter versions quickly.
"image": "mcr.microsoft.com/devcontainers/python:3.13-bullseye",

// Extra tooling layered onto the base image. The ":1" / ":2" suffixes pin the
// dev-container feature definition version, not the installed tool's version.
"features": {
"ghcr.io/hspaans/devcontainer-features/pytest:1": {},
"ghcr.io/devcontainers-extra/features/pylint:2": {},
"ghcr.io/devcontainers-extra/features/poetry:2": {}
},

// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},

// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],

// Use 'postCreateCommand' to run commands after the container is created.
// Normalizes line endings for cross-platform checkouts, then installs the dev dependencies.
"postCreateCommand": "git config --global core.autocrlf true && pip3 install --user -r requirements-dev.txt",

// Configure tool-specific properties.
"customizations": {
"vscode": {
"extensions": ["ms-python.python"]
}
}

// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
strategy:
max-parallel: 5
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]

steps:
- uses: actions/checkout@v4
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,10 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python 3.12
- name: Set up Python 3.13
uses: actions/setup-python@v5
with:
python-version: 3.12
python-version: 3.13
- name: Install dependencies
run: |
python -m pip install --upgrade pip
Expand Down
7 changes: 6 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
{
"editor.formatOnSave": true
"editor.formatOnSave": true,
"python.testing.pytestArgs": [
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}
7 changes: 4 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@ version = "1.2.0"
authors = [{name = "Microsoft", email = "[email protected]"}]
description = "Core component of the Microsoft Graph Python SDK"
dependencies = [
"microsoft-kiota-abstractions >=1.0.0,<2.0.0",
"microsoft-kiota-authentication-azure >=1.0.0,<2.0.0",
"microsoft-kiota-http >=1.0.0,<2.0.0",
"microsoft-kiota-abstractions >=1.8.0,<2.0.0",
"microsoft-kiota-authentication-azure >=1.8.0,<2.0.0",
"microsoft-kiota-http >=1.8.0,<2.0.0",
"httpx[http2] >=0.23.0",
]
requires-python = ">=3.9"
Expand All @@ -26,6 +26,7 @@ classifiers = [
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"License :: OSI Approved :: MIT License",
]

Expand Down
8 changes: 4 additions & 4 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ pycparser==2.22

pyjwt[crypto]==2.9.0 ; python_version >= '3.7'

pylint==3.2.7
pylint==3.3.3

pyproject-hooks==1.2.0 ; python_version >= '3.7'

Expand Down Expand Up @@ -145,11 +145,11 @@ httpx[http2]==0.28.1

hyperframe==6.0.1 ; python_full_version >= '3.6.1'

microsoft-kiota-abstractions==1.7.1
microsoft-kiota-abstractions==1.8.0

microsoft-kiota-authentication-azure==1.7.1
microsoft-kiota-authentication-azure==1.8.0

microsoft-kiota-http==1.7.1
microsoft-kiota-http==1.8.0

multidict==6.1.0 ; python_version >= '3.7'

Expand Down
3 changes: 1 addition & 2 deletions src/msgraph_core/models/large_file_upload_session.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
from __future__ import annotations

import datetime
from collections.abc import Callable
from dataclasses import dataclass, field
from typing import Any, Optional
from typing import Any, Callable, Optional

from kiota_abstractions.serialization import (
AdditionalDataHolder,
Expand Down
3 changes: 1 addition & 2 deletions src/msgraph_core/models/page_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,8 @@
"""
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Optional, TypeVar
from typing import Callable, Optional, TypeVar
(Review note: svrooij marked this conversation as resolved — Show resolved / Hide resolved)

from kiota_abstractions.serialization.parsable import Parsable
from kiota_abstractions.serialization.parse_node import ParseNode
Expand Down
3 changes: 1 addition & 2 deletions src/msgraph_core/models/upload_result.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Generic, Optional, TypeVar
from typing import Any, Callable, Generic, Optional, TypeVar

from kiota_abstractions.serialization import (
AdditionalDataHolder,
Expand Down
21 changes: 13 additions & 8 deletions src/msgraph_core/requests/batch_response_content.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import base64
from collections.abc import Callable
from io import BytesIO
from typing import Optional, Type, TypeVar, Union
from typing import Callable, Optional, Type, TypeVar, Union

from kiota_abstractions.serialization import (
Parsable,
Expand Down Expand Up @@ -108,17 +107,20 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
raise ValueError(f"No response found for id: {request_id}")

if not issubclass(type, Parsable):
raise ValueError("Type passed must implement the Parsable interface")
raise ValueError(
"Type passed must implement the Parsable interface")

response = self.get_response_by_id(request_id)
if response is not None:
content_type = response.content_type
else:
raise ValueError(
f"Unable to get content-type header in response item for request Id: {request_id}"
f"Unable to get content-type header in response item for request Id: {
request_id}"
)
if not content_type:
raise RuntimeError("Unable to get content-type header in response item")
raise RuntimeError(
"Unable to get content-type header in response item")

response_body = response.body or BytesIO()
try:
Expand All @@ -128,15 +130,17 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
)
except Exception:
response_body.seek(0)
base64_decoded_body = BytesIO(base64.b64decode(response_body.read()))
base64_decoded_body = BytesIO(
base64.b64decode(response_body.read()))
parse_node = ParseNodeFactoryRegistry().get_root_parse_node(
content_type, base64_decoded_body
)
response.body = base64_decoded_body
return parse_node.get_object_value(type)
except Exception:
raise ValueError(
f"Unable to deserialize batch response for request Id: {request_id} to {type}"
f"Unable to deserialize batch response for request Id: {
request_id} to {type}"
)

def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
Expand All @@ -161,7 +165,8 @@ def serialize(self, writer: SerializationWriter) -> None:
:param writer: The writer to write to
"""
if self._responses is not None:
writer.write_collection_of_object_values('responses', list(self._responses.values()))
writer.write_collection_of_object_values(
'responses', list(self._responses.values()))
else:
writer.write_collection_of_object_values('responses', [])

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from collections.abc import Callable

from typing import Callable
from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter

from .batch_response_content import BatchResponseContent
Expand Down Expand Up @@ -52,7 +51,8 @@ async def responses_status_codes(self) -> dict[str, int]:
else:
raise ValueError("Response ID cannot be None")
else:
raise TypeError("Invalid type: Collection must be of type BatchResponseContent")
raise TypeError(
"Invalid type: Collection must be of type BatchResponseContent")
return status_codes

def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
Expand All @@ -65,7 +65,8 @@ def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
return {
'responses':
lambda n:
setattr(self, "_responses", n.get_collection_of_object_values(BatchResponseItem))
setattr(self, "_responses",
n.get_collection_of_object_values(BatchResponseItem))
}

def serialize(self, writer: SerializationWriter) -> None:
Expand Down
41 changes: 26 additions & 15 deletions src/msgraph_core/tasks/large_file_upload.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import logging
import os
from asyncio import Future
from collections.abc import Callable
from datetime import datetime, timedelta, timezone
from io import BytesIO
from typing import Any, Optional, Tuple, TypeVar, Union
from typing import Any, Callable, Optional, Tuple, TypeVar, Union

from kiota_abstractions.headers_collection import HeadersCollection
from kiota_abstractions.method import Method
Expand Down Expand Up @@ -38,11 +37,13 @@ def __init__(
self.max_chunk_size = max_chunk_size
self.factory = parsable_factory
cleaned_value = self.check_value_exists(
upload_session, 'get_next_expected_range', ['next_expected_range', 'NextExpectedRange']
upload_session, 'get_next_expected_range', [
'next_expected_range', 'NextExpectedRange']
)
self.next_range = cleaned_value[0]
self._chunks = int((self.file_size / max_chunk_size) + 0.5)
self.on_chunk_upload_complete: Optional[Callable[[list[int]], None]] = None
self.on_chunk_upload_complete: Optional[Callable[[
list[int]], None]] = None

@property
def upload_session(self):
Expand All @@ -68,7 +69,8 @@ def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> b
now = datetime.now(timezone.utc)
upload_session = upload_session or self.upload_session
if not hasattr(upload_session, "expiration_date_time"):
raise ValueError("Upload session does not have an expiration date time")
raise ValueError(
"Upload session does not have an expiration date time")
expiry = getattr(upload_session, 'expiration_date_time')
if expiry is None:
raise ValueError("Expiry is None")
Expand All @@ -93,13 +95,16 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):

self.on_chunk_upload_complete = after_chunk_upload or self.on_chunk_upload_complete
session: LargeFileUploadSession = await self.next_chunk(
self.stream, 0, max(0, min(self.max_chunk_size - 1, self.file_size - 1))
self.stream, 0, max(
0, min(self.max_chunk_size - 1, self.file_size - 1))
)
process_next = session
# determine the range to be uploaded
# even when resuming existing upload sessions.
range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
range_parts = self.next_range[0].split(
"-") if self.next_range else ['0', '0']
end = min(int(range_parts[0]) +
self.max_chunk_size - 1, self.file_size)
uploaded_range = [range_parts[0], end]
response = None

Expand All @@ -124,12 +129,13 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
if not next_range:
continue
range_parts = str(next_range[0]).split("-")
end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
end = min(int(range_parts[0]) +
self.max_chunk_size, self.file_size)
uploaded_range = [range_parts[0], end]
self.next_range = next_range[0] + "-"
process_next = await self.next_chunk(self.stream)

except Exception as error: #pylint: disable=broad-except
except Exception as error: # pylint: disable=broad-except
logging.error("Error uploading chunk %s", error)
finally:
self.chunks -= 1
Expand Down Expand Up @@ -176,7 +182,8 @@ async def next_chunk(
chunk_data = file.read(end - start + 1)
info.headers = HeadersCollection()

info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
info.headers.try_add(
'Content-Range', f'bytes {start}-{end}/{self.file_size}')
info.headers.try_add('Content-Length', str(len(chunk_data)))
info.headers.try_add("Content-Type", "application/octet-stream")
info.set_stream_content(bytes(chunk_data))
Expand Down Expand Up @@ -216,7 +223,8 @@ async def last_chunk(
chunk_data = file.read(end - start + 1)
info.headers = HeadersCollection()

info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
info.headers.try_add(
'Content-Range', f'bytes {start}-{end}/{self.file_size}')
info.headers.try_add('Content-Length', str(len(chunk_data)))
info.headers.try_add("Content-Type", "application/octet-stream")
info.set_stream_content(bytes(chunk_data))
Expand All @@ -231,7 +239,8 @@ def get_file(self) -> BytesIO:

async def cancel(self) -> Parsable:
upload_url = self.get_validated_upload_url(self.upload_session)
request_information = RequestInformation(method=Method.DELETE, url_template=upload_url)
request_information = RequestInformation(
method=Method.DELETE, url_template=upload_url)

await self.request_adapter.send_no_response_content_async(request_information)

Expand All @@ -254,7 +263,8 @@ def additional_data_contains(self, parsable: Parsable,
'AdditionalDataHolder'
)
if not hasattr(parsable, 'additional_data'):
raise ValueError('The object passed does not contain an additional_data property')
raise ValueError(
'The object passed does not contain an additional_data property')
additional_data = parsable.additional_data
for property_candidate in property_candidates:
if property_candidate in additional_data:
Expand Down Expand Up @@ -298,7 +308,8 @@ async def resume(self) -> Future:

def get_validated_upload_url(self, upload_session: Parsable) -> str:
if not hasattr(upload_session, 'upload_url'):
raise RuntimeError('The upload session does not contain a valid upload url')
raise RuntimeError(
'The upload session does not contain a valid upload url')
result = upload_session.upload_url

if result is None or result.strip() == '':
Expand Down
Loading
Loading