
Commit 93e4534: more noqas

1 parent 192e905

13 files changed: +56 -34 lines changed
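
Every hunk below adds a scoped mypy suppression (the "noqas" of the commit title). A `# type: ignore[code]` comment silences one named error code on one line rather than the whole file. A minimal standalone sketch of the mechanism, with illustrative names that are not part of this commit:

```python
from typing import Optional


def greet(name: Optional[str]) -> str:
    # mypy would flag this line with:
    #   Unsupported operand types for + ("str" and "None")  [operator]
    # The scoped ignore suppresses only the [operator] code here.
    return "Hello, " + name  # type: ignore[operator]


print(greet("world"))
```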

airbyte_cdk/cli/source_declarative_manifest/_run.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -72,7 +72,7 @@ def __init__(
         super().__init__(
             catalog=catalog,
             config=config,
-            state=state,
+            state=state,  # type: ignore [arg-type]
             path_to_yaml="manifest.yaml",
         )
```

airbyte_cdk/connector_builder/message_grouper.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -274,7 +274,7 @@ def _get_message_groups(
                 if message.trace.type == TraceType.ERROR:  # type: ignore[union-attr] # AirbyteMessage with MessageType.TRACE has trace.type
                     yield message.trace
                 elif message.type == MessageType.RECORD:
-                    current_page_records.append(message.record.data)  # type: ignore[union-attr] # AirbyteMessage with MessageType.RECORD has record.data
+                    current_page_records.append(message.record.data)  # type: ignore[arg-type, union-attr] # AirbyteMessage with MessageType.RECORD has record.data
                     records_count += 1
                     schema_inferrer.accumulate(message.record)
                     datetime_format_inferrer.accumulate(message.record)
@@ -355,7 +355,7 @@ def _close_page(
             StreamReadPages(
                 request=current_page_request,
                 response=current_page_response,
-                records=deepcopy(current_page_records),
+                records=deepcopy(current_page_records),  # type: ignore [arg-type]
             )  # type: ignore
         )
         current_page_records.clear()
```

airbyte_cdk/destinations/vector_db_based/writer.py

Lines changed: 12 additions & 4 deletions
```diff
@@ -83,14 +83,22 @@ def write(
             yield message
         elif message.type == Type.RECORD:
             record_chunks, record_id_to_delete = self.processor.process(message.record)
-            self.chunks[(message.record.namespace, message.record.stream)].extend(record_chunks)
+            self.chunks[
+                (  # type: ignore [index] # expected "tuple[str, str]", got "tuple[str | Any | None, str | Any]"
+                    message.record.namespace,  # type: ignore [union-attr] # record not None
+                    message.record.stream,  # type: ignore [union-attr] # record not None
+                )
+            ].extend(record_chunks)
             if record_id_to_delete is not None:
                 if message.record is None:
                     raise ValueError("Record messages cannot have null `record` property.")
 
-                self.ids_to_delete[(message.record.namespace, message.record.stream)].append(
-                    record_id_to_delete
-                )
+                self.ids_to_delete[
+                    (  # type: ignore [index] # expected "tuple[str, str]", got "tuple[str | Any | None, str | Any]"
+                        message.record.namespace,  # type: ignore [union-attr] # record not None
+                        message.record.stream,  # type: ignore [union-attr] # record not None
+                    )
+                ].append(record_id_to_delete)
             self.number_of_chunks += len(record_chunks)
             if self.number_of_chunks >= self.batch_size:
                 self._process_batch()
```
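
For context, the `[index]` suppressions above arise because `message.record.namespace` is `Optional`, so the tuple key is wider than the dict's declared `tuple[str, str]` key type. A standalone sketch of the same error class, using illustrative names rather than the CDK's:

```python
from collections import defaultdict
from typing import Optional

# Dict keyed by (namespace, stream); the key type does not admit None.
chunks: defaultdict[tuple[str, str], list[str]] = defaultdict(list)

namespace: Optional[str] = None  # e.g. a record without a namespace
stream = "users"

# mypy: Invalid index type "tuple[str | None, str]" for
# "defaultdict[tuple[str, str], list[str]]"; expected "tuple[str, str]"  [index]
chunks[(namespace, stream)].append("chunk-1")  # type: ignore[index]

print(dict(chunks))
```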

airbyte_cdk/sources/declarative/datetime/min_max_datetime.py

Lines changed: 9 additions & 3 deletions
```diff
@@ -41,12 +41,12 @@ def __post_init__(self, parameters: Mapping[str, Any]) -> None:
         self.datetime = InterpolatedString.create(self.datetime, parameters=parameters or {})
         self._parser = DatetimeParser()
         self.min_datetime = (
-            InterpolatedString.create(self.min_datetime, parameters=parameters)
+            InterpolatedString.create(self.min_datetime, parameters=parameters)  # type: ignore [assignment] # expression has type "InterpolatedString | None", variable has type "InterpolatedString | str"
             if self.min_datetime
             else None
         )  # type: ignore
         self.max_datetime = (
-            InterpolatedString.create(self.max_datetime, parameters=parameters)
+            InterpolatedString.create(self.max_datetime, parameters=parameters)  # type: ignore [assignment] # expression has type "InterpolatedString | None", variable has type "InterpolatedString | str"
             if self.max_datetime
             else None
         )  # type: ignore
@@ -66,7 +66,13 @@ def get_datetime(
             datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z"
 
         time = self._parser.parse(
-            str(self.datetime.eval(config, **additional_parameters)), datetime_format
+            str(
+                self.datetime.eval(  # type: ignore[union-attr] # str has no attribute "eval"
+                    config,
+                    **additional_parameters,
+                )
+            ),
+            datetime_format,
         )  # type: ignore # datetime is always cast to an interpolated string
 
         if self.min_datetime:
```
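
The `[assignment]` suppressions here cover a field whose declared type (`InterpolatedString | str`) does not include the `None` the ternary can produce. A minimal sketch of that mismatch, with hypothetical types:

```python
from typing import Optional, Union


class Holder:
    # The declared field type admits str or int, but not None.
    value: Union[str, int]


def maybe_int(flag: bool) -> Optional[int]:
    return 42 if flag else None


h = Holder()
# mypy: Incompatible types in assignment (expression has type "int | None",
# variable has type "str | int")  [assignment]
h.value = maybe_int(True)  # type: ignore[assignment]
print(h.value)
```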

airbyte_cdk/sources/declarative/interpolation/jinja.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -120,7 +120,7 @@ def _literal_eval(self, result: Optional[str], valid_types: Optional[Tuple[Type[
     def _eval(self, s: Optional[str], context: Mapping[str, Any]) -> Optional[str]:
         try:
             undeclared = self._find_undeclared_variables(s)
-            undeclared_not_in_context = {var for var in undeclared if var not in context}
+            undeclared_not_in_context = {var for var in undeclared if var not in context}  # type: ignore [attr-defined] # `Template` class not iterable
             if undeclared_not_in_context:
                 raise ValueError(
                     f"Jinja macro has undeclared variables: {undeclared_not_in_context}. Context: {context}"
@@ -137,11 +137,11 @@ def _find_undeclared_variables(self, s: Optional[str]) -> Template:
         Find undeclared variables and cache them
         """
         ast = self._environment.parse(s)  # type: ignore # parse is able to handle None
-        return meta.find_undeclared_variables(ast)
+        return meta.find_undeclared_variables(ast)  # type: ignore [return-value] # Expected `Template` but got `set[str]`
 
     @cache
     def _compile(self, s: Optional[str]) -> Template:
         """
         We must cache the Jinja Template ourselves because we're using `from_string` instead of a template loader
         """
-        return self._environment.from_string(s)
+        return self._environment.from_string(s)  # type: ignore [arg-type] # Expected `str | Template` but passed `str | None`
```
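
The `[return-value]` ignore reflects that `_find_undeclared_variables` is annotated as returning a `Template` while `jinja2.meta.find_undeclared_variables` actually returns a `set[str]`. The underlying jinja2 call in isolation:

```python
from jinja2 import Environment, meta

env = Environment()
ast = env.parse("Hello {{ name }} from {{ city }}!")

# Returns the set of variable names the template references but never
# defines, e.g. {'name', 'city'}; it is a set[str], not a Template.
print(meta.find_undeclared_variables(ast))
```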

airbyte_cdk/sources/embedded/base_integration.py

Lines changed: 3 additions & 2 deletions
```diff
@@ -52,8 +52,9 @@ def _load_data(
         for message in self.source.read(self.config, configured_catalog, state):
             if message.type == Type.RECORD:
                 output = self._handle_record(
-                    message.record, get_defined_id(stream, message.record.data)
-                )  # type: ignore[union-attr] # record has `data`
+                    message.record,
+                    get_defined_id(stream, message.record.data),  # type: ignore[union-attr, arg-type]
+                )
                 if output:
                     yield output
             elif message.type is Type.STATE and message.state:
```
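
The `[union-attr]` part of this suppression fires whenever an attribute is read off an `Optional` value without narrowing it first: `message.record` may be `None` in the type system even when the runtime guarantees it is set. A standalone sketch with hypothetical classes:

```python
from dataclasses import dataclass, field
from typing import Any, Optional


@dataclass
class Record:
    data: dict[str, Any] = field(default_factory=dict)


@dataclass
class Message:
    record: Optional[Record] = None


msg = Message(record=Record(data={"id": 1}))
# mypy: Item "None" of "Optional[Record]" has no attribute "data"  [union-attr]
print(msg.record.data)  # type: ignore[union-attr]
```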

airbyte_cdk/sources/file_based/file_types/avro_parser.py

Lines changed: 13 additions & 10 deletions
```diff
@@ -64,18 +64,20 @@ async def infer_schema(
             raise ValueError(f"Expected ParquetFormat, got {avro_format}")
 
         with stream_reader.open_file(file, self.file_read_mode, self.ENCODING, logger) as fp:
-            avro_reader = fastavro.reader(fp)
+            avro_reader = fastavro.reader(fp)  # type: ignore [arg-type]
             avro_schema = avro_reader.writer_schema
-            if not avro_schema["type"] == "record":
-                unsupported_type = avro_schema["type"]
+            if not avro_schema["type"] == "record":  # type: ignore [index, call-overload]
+                unsupported_type = avro_schema["type"]  # type: ignore [index, call-overload]
                 raise ValueError(
                     f"Only record based avro files are supported. Found {unsupported_type}"
                 )
             json_schema = {
-                field["name"]: AvroParser._convert_avro_type_to_json(
-                    avro_format, field["name"], field["type"]
+                field["name"]: AvroParser._convert_avro_type_to_json(  # type: ignore [index]
+                    avro_format,
+                    field["name"],  # type: ignore [index]
+                    field["type"],  # type: ignore [index]
                 )
-                for field in avro_schema["fields"]
+                for field in avro_schema["fields"]  # type: ignore [index, call-overload]
             }
             return json_schema
 
@@ -180,18 +182,19 @@ def parse_records(
         line_no = 0
         try:
             with stream_reader.open_file(file, self.file_read_mode, self.ENCODING, logger) as fp:
-                avro_reader = fastavro.reader(fp)
+                avro_reader = fastavro.reader(fp)  # type: ignore [arg-type]
                 schema = avro_reader.writer_schema
                 schema_field_name_to_type = {
-                    field["name"]: cast(dict, field["type"]) for field in schema["fields"]
+                    field["name"]: cast(dict[str, Any], field["type"])  # type: ignore [index]
+                    for field in schema["fields"]  # type: ignore [index, call-overload] # If schema is not dict, it is not subscriptable by strings
                 }
                 for record in avro_reader:
                     line_no += 1
                     yield {
                         record_field: self._to_output_value(
                             avro_format,
-                            schema_field_name_to_type[record_field],
-                            record[record_field],
+                            schema_field_name_to_type[record_field],  # type: ignore [index] # Any not subscriptable
+                            record[record_field],  # type: ignore [index] # Any not subscriptable
                         )
                         for record_field, record_value in schema_field_name_to_type.items()
                     }
```
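
These ignores exist because fastavro exposes the writer schema as a broad union rather than a plain dict, and a union that includes non-mapping members cannot be indexed with string keys. A rough standalone sketch; the union alias here is illustrative, not fastavro's actual annotation (the real type also triggers the `[call-overload]` code suppressed above):

```python
from typing import Any, Union

# Illustrative stand-in for a schema union; not fastavro's real alias.
AvroSchema = Union[None, list[Any], dict[str, Any]]

schema: AvroSchema = {"type": "record", "fields": []}

# mypy cannot prove `schema` is the dict member of the union, so string
# indexing is rejected for the None and list members.
schema_type = schema["type"]  # type: ignore[index]
print(schema_type)
```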

airbyte_cdk/sources/file_based/file_types/excel_parser.py

Lines changed: 5 additions & 2 deletions
```diff
@@ -70,7 +70,10 @@ async def infer_schema(
             for column, df_type in df.dtypes.items():
                 # Choose the broadest data type if the column's data type differs in dataframes
                 prev_frame_column_type = fields.get(column)
-                fields[column] = self.dtype_to_json_type(prev_frame_column_type, df_type)
+                fields[column] = self.dtype_to_json_type(  # type: ignore [index]
+                    prev_frame_column_type,
+                    df_type,
+                )
 
         schema = {
             field: (
@@ -187,4 +190,4 @@ def open_and_parse_file(fp: Union[IOBase, str, Path]) -> pd.DataFrame:
         Returns:
             pd.DataFrame: Parsed data from the Excel file.
         """
-        return pd.ExcelFile(fp, engine="calamine").parse()
+        return pd.ExcelFile(fp, engine="calamine").parse()  # type: ignore [arg-type]
```

airbyte_cdk/sources/http_logger.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -14,7 +14,7 @@ def format_http_message(
     title: str,
     description: str,
     stream_name: Optional[str],
-    is_auxiliary: bool = None,
+    is_auxiliary: bool | None = None,
 ) -> LogMessage:
     request = response.request
     log_message = {
@@ -42,10 +42,10 @@ def format_http_message(
         "url": {"full": request.url},
     }
     if is_auxiliary is not None:
-        log_message["http"]["is_auxiliary"] = is_auxiliary
+        log_message["http"]["is_auxiliary"] = is_auxiliary  # type: ignore [index]
     if stream_name:
         log_message["airbyte_cdk"] = {"stream": {"name": stream_name}}
-    return log_message
+    return log_message  # type: ignore [return-value] # got "dict[str, object]", expected "dict[str, JsonType]"
 
 
 def _normalize_body_string(body_str: Optional[Union[str, bytes]]) -> Optional[str]:
```
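
The signature change above replaces an implicit-Optional default (`is_auxiliary: bool = None`) with an explicit `bool | None`; mypy's `no_implicit_optional` behavior, the default in recent releases, rejects the implicit form. A minimal sketch with a hypothetical function:

```python
from typing import Optional


# Rejected under no_implicit_optional:
#   Incompatible default for argument "retries" (default has type "None",
#   argument has type "int")  [assignment]
def fetch(retries: int = None):  # type: ignore[assignment]
    return retries


# The explicit spelling, analogous to the fix above:
def fetch_fixed(retries: Optional[int] = None) -> Optional[int]:
    return retries


print(fetch_fixed(3))
```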

airbyte_cdk/sources/streams/concurrent/adapters.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -300,7 +300,7 @@ def read(self) -> Iterable[Record]:
                 yield Record(
                     data=data_to_return,
                     stream_name=self.stream_name(),
-                    associated_slice=self._slice,
+                    associated_slice=self._slice,  # type: ignore [arg-type]
                 )
             else:
                 self._message_repository.emit_message(record_data)
```
