Remove unused imports

Harshal Sheth 2021-02-11 21:59:54 -08:00 committed by Shirshanka Das
parent 43d5fac494
commit 7ca018aaa4
24 changed files with 44 additions and 72 deletions

View File

@@ -21,12 +21,12 @@ jobs:
         python -m pip install --upgrade pip
         pip install -e .
         pip install -r test_requirements.txt
+    - name: Check formatting with black
+      run: |
+        black --exclude 'gometa/metadata' -S -t py36 -l 120 src tests
     - name: Lint with flake8
       run: |
-        # stop the build if there are Python syntax errors or undefined names
-        #flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+        flake8 src tests --count --statistics || true
     - name: Check with mypy
       run: |
        mypy -p gometa || true

View File

@@ -1,6 +1,14 @@
 [flake8]
-max-line-length = 130
+max-line-length = 120
 max-complexity = 15
+ignore = D203
+exclude =
+  .git,
+  src/gometa/metadata,
+  __pycache__
+per-file-ignores =
+  # imported but unused
+  __init__.py: F401
 
 [mypy]
 mypy_path = src
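
The new per-file-ignores entry is worth a note: package __init__.py files routinely import names only to re-export them, and flake8 flags any import not used within the same file as F401. A minimal sketch of the pattern, reconstructed from imports elsewhere in this diff (not copied from the repo):

# gometa/configuration/__init__.py (illustrative reconstruction)
# Both names are re-exports: they are never referenced in this file itself,
# so flake8 would report F401 for each without the per-file-ignores entry.
from .common import ConfigModel, ConfigurationMechanism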

View File

@@ -1,7 +1,6 @@
 from abc import ABC, abstractmethod
-from typing import TypeVar, Type, List, IO
+from typing import List, IO
 from pydantic import BaseModel, ValidationError
-from pathlib import Path
 from contextlib import contextmanager
 import re

View File

@@ -1,5 +1,4 @@
-from typing import Optional
-from pydantic import BaseModel, Field, ValidationError, validator
+from pydantic import BaseModel, validator
 
 class _KafkaConnectionConfig(BaseModel):

View File

@@ -1,7 +1,7 @@
 from typing import IO
 import yaml
-from .common import ConfigModel, ConfigurationMechanism
+from gometa.configuration import ConfigurationMechanism
 
 class YamlConfigurationMechanism(ConfigurationMechanism):

View File

@@ -6,7 +6,7 @@ import click
 from gometa.configuration.common import ConfigurationMechanism, ConfigurationError, nicely_formatted_validation_errors
 from gometa.configuration.yaml import YamlConfigurationMechanism
 from gometa.configuration.toml import TomlConfigurationMechanism
-from gometa.ingestion.run.pipeline import Pipeline, PipelineConfig
+from gometa.ingestion.run.pipeline import Pipeline
 
 logger = logging.getLogger(__name__)

View File

@@ -1,5 +1,3 @@
-from abc import abstractmethod, ABCMeta
-
 class Closeable:
     def close(self):

View File

@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import TypeVar, Generic, Optional
+from typing import TypeVar, Generic
 from abc import abstractmethod, ABCMeta
 
 T = TypeVar('T')

View File

@@ -1,4 +1,3 @@
-from abc import abstractmethod, ABCMeta
 from dataclasses import dataclass
 import json
 import pprint

View File

@@ -1,5 +1,5 @@
 from typing import Iterable
-from gometa.ingestion.api.source import Extractor, WorkUnit
+from gometa.ingestion.api.source import Extractor
 from gometa.ingestion.api import RecordEnvelope
 from gometa.ingestion.api.common import PipelineContext
 from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent

View File

@@ -1,10 +1,8 @@
 import logging
-from typing import List, Dict, Any
+from typing import List, Any
 import avro.schema
 from gometa.metadata.com.linkedin.pegasus2avro.schema import (
-    SchemaMetadata,
-    KafkaSchema,
     SchemaField,
     SchemaFieldDataType,
     BooleanTypeClass,
@@ -14,7 +12,6 @@ from gometa.metadata.com.linkedin.pegasus2avro.schema import (
     NumberTypeClass,
     EnumTypeClass,
     NullTypeClass,
-    MapTypeClass,
     ArrayTypeClass,
     UnionTypeClass,
     RecordTypeClass,

View File

@@ -1,14 +1,9 @@
-from typing import Dict
-from pydantic import BaseModel
-from dataclasses import dataclass, field
-import pprint
 from gometa.configuration.common import DynamicTypedConfig, ConfigModel
 from gometa.ingestion.api.source import Source, Extractor
 from gometa.ingestion.source import source_class_mapping
 from gometa.ingestion.api.common import PipelineContext
-from gometa.ingestion.api.sink import Sink, NoopWriteCallback, WriteCallback
+from gometa.ingestion.api.sink import Sink, WriteCallback
 from gometa.ingestion.sink import sink_class_mapping
-from typing import Optional
 import importlib
 import time
 import logging
@@ -77,7 +72,7 @@ class Pipeline:
     def run(self):
         callback = LoggingCallback()
-        extractor = self.extractor_class()
+        extractor: Extractor = self.extractor_class()
         for wu in self.source.get_workunits():
             # TODO: change extractor interface
             extractor.configure({}, self.ctx)
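
The extractor: Extractor annotation in run() is what keeps mypy useful here: the extractor class is resolved dynamically (note the importlib import above), so without the annotation the instance would be typed Any and nothing below it would be checked. A minimal sketch of the pattern, with hypothetical names:

import importlib
from typing import Type

class Extractor:  # minimal stand-in for gometa.ingestion.api.source.Extractor
    def configure(self, config: dict, ctx: object) -> None:
        raise NotImplementedError

def load_extractor_class(dotted_path: str) -> Type[Extractor]:
    # importlib/getattr results are typed Any; the return annotation is what
    # lets mypy check instances created from the loaded class.
    module_name, class_name = dotted_path.rsplit(".", 1)
    return getattr(importlib.import_module(module_name), class_name)

def run(dotted_path: str) -> None:
    extractor_class = load_extractor_class(dotted_path)
    # Without this annotation mypy infers Any and checks nothing below it;
    # with it, every call is validated against the Extractor interface.
    extractor: Extractor = extractor_class()
    extractor.configure({}, object())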

View File

@@ -1,7 +1,5 @@
-from dataclasses import dataclass, field
-import json
-from typing import Optional, TypeVar, Type
-from pydantic import BaseModel, Field, ValidationError, validator
+from dataclasses import dataclass
+from pydantic import BaseModel
 from gometa.ingestion.api.sink import Sink, WriteCallback, SinkReport
 from gometa.ingestion.api.common import RecordEnvelope, WorkUnit, PipelineContext
 from gometa.configuration.kafka import KafkaProducerConnectionConfig

View File

@@ -1,14 +1,10 @@
-from abc import ABC, abstractmethod
 from dataclasses import dataclass, field
-from typing import Optional, TypeVar, Type, Dict
-from pydantic import BaseModel, Field, ValidationError, validator
-from enum import Enum
-from pathlib import Path
+from typing import Type, Dict
+from pydantic import BaseModel
 import requests
 from requests.exceptions import HTTPError
 from gometa.ingestion.api.sink import Sink, WriteCallback, SinkReport
 from gometa.ingestion.api.common import RecordEnvelope, WorkUnit
-import json
 from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
 from gometa.metadata import (
     ChartSnapshotClass,
@@ -18,7 +14,7 @@ from gometa.metadata import (
     DatasetSnapshotClass,
     DataProcessSnapshotClass,
     MLModelSnapshotClass,
-    MLFeatureSnapshotClass,
+    # MLFeatureSnapshotClass,
 )
 from collections import OrderedDict

View File

@@ -1,7 +1,6 @@
 from gometa.ingestion.api.sink import Sink, WriteCallback, SinkReport
-from gometa.ingestion.api.common import RecordEnvelope, PipelineContext, WorkUnit
+from gometa.ingestion.api.common import RecordEnvelope, PipelineContext
 from pydantic import BaseModel
-import os
 import pathlib
 import logging
 import json

View File

@@ -1,14 +1,12 @@
 import logging
 from gometa.configuration import ConfigModel
 from gometa.configuration.kafka import KafkaConsumerConnectionConfig
-from gometa.ingestion.api.source import Source, Extractor, SourceReport
-from gometa.ingestion.api.source import WorkUnit
-from typing import Optional, Iterable, List, Dict, Any
+from gometa.ingestion.api.source import Source, SourceReport
+from typing import Iterable, List, Dict, Any
 from dataclasses import dataclass, field
 import confluent_kafka
 from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient
 import re
-from gometa.ingestion.api.closeable import Closeable
 from gometa.ingestion.source.metadata_common import MetadataWorkUnit
 import time

View File

@@ -1,7 +1,7 @@
 import json
 from dataclasses import dataclass, field
 from pydantic import BaseModel
-from typing import Optional, Iterable
+from typing import Iterable
 from gometa.ingestion.api.source import Source, SourceReport
 from gometa.ingestion.source.metadata_common import MetadataWorkUnit
 from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent

View File

@@ -3,7 +3,7 @@ from sqlalchemy import types
 from sqlalchemy.engine import reflection
 from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
 from gometa.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
-from gometa.metadata.com.linkedin.pegasus2avro.schema import SchemaMetadata, MySqlDDL
+from gometa.metadata.com.linkedin.pegasus2avro.schema import SchemaMetadata, MySqlDDL, SchemaField, SchemaFieldDataType
 from gometa.metadata.com.linkedin.pegasus2avro.common import AuditStamp
 from gometa.ingestion.api.source import WorkUnit, Source, SourceReport
@@ -15,21 +15,13 @@ from typing import Optional, List, Any, Dict
 from dataclasses import dataclass, field
 from gometa.metadata.com.linkedin.pegasus2avro.schema import (
-    SchemaMetadata,
-    KafkaSchema,
-    SchemaField,
-    SchemaFieldDataType,
     BooleanTypeClass,
-    FixedTypeClass,
     StringTypeClass,
     BytesTypeClass,
     NumberTypeClass,
     EnumTypeClass,
     NullTypeClass,
-    MapTypeClass,
     ArrayTypeClass,
-    UnionTypeClass,
-    RecordTypeClass,
 )
 
 logger = logging.getLogger(__name__)
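
The type classes kept in this import block are the ones the SQL source maps reflected SQLAlchemy column types onto. A hedged sketch of that mapping; the dict name, the exact pairings, and the fallback are assumptions for illustration, not code from this commit:

from sqlalchemy import types
from gometa.metadata.com.linkedin.pegasus2avro.schema import (
    SchemaFieldDataType,
    BooleanTypeClass,
    StringTypeClass,
    BytesTypeClass,
    NumberTypeClass,
    EnumTypeClass,
    NullTypeClass,
    ArrayTypeClass,
)

# Hypothetical lookup from SQLAlchemy types to schema type classes.
# Enum is listed before String because sqlalchemy.types.Enum subclasses String.
_field_type_mapping = {
    types.Boolean: BooleanTypeClass,
    types.Enum: EnumTypeClass,
    types.Integer: NumberTypeClass,
    types.Numeric: NumberTypeClass,
    types.String: StringTypeClass,
    types.LargeBinary: BytesTypeClass,
    types.ARRAY: ArrayTypeClass,
}

def get_column_type(sql_type: types.TypeEngine) -> SchemaFieldDataType:
    # Fall back to NullTypeClass when a dialect-specific type has no mapping.
    for sql_class, type_class in _field_type_mapping.items():
        if isinstance(sql_type, sql_class):
            return SchemaFieldDataType(type=type_class())
    return SchemaFieldDataType(type=NullTypeClass())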

View File

@@ -1,6 +1,4 @@
 import os
-import pytest
-import subprocess
 import mce_helpers

View File

@@ -1,11 +1,7 @@
 import os
-import pytest
 import subprocess
-import time
 
 def test_ingest(sql_server, pytestconfig):
     docker = "docker"
     command = f"{docker} exec testsqlserver /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P 'test!Password' -d master -i /setup/setup.sql"

View File

@@ -1,8 +1,3 @@
-import os
-import pytest
-import subprocess
-import shutil
 from gometa.ingestion.run.pipeline import Pipeline
 import mce_helpers
@@ -19,8 +14,8 @@ def test_serde_large(pytestconfig, tmp_path):
     pipeline = Pipeline.create(
         {
-            'source': {'type': 'file', 'file': {'filename': str(golden_file),},},
-            'sink': {'type': 'file', 'file': {'filename': str(output_file),},},
+            'source': {'type': 'file', 'file': {'filename': str(golden_file)}},
+            'sink': {'type': 'file', 'file': {'filename': str(output_file)}},
         }
     )
     pipeline.run()

View File

@@ -3,14 +3,19 @@ from gometa.configuration.common import AllowDenyPattern
 def test_allow_all():
     pattern = AllowDenyPattern.allow_all()
-    assert pattern.allowed("foo.table") == True
+    assert pattern.allowed("foo.table")
 
 def test_deny_all():
     pattern = AllowDenyPattern(allow=[], deny=[".*"])
-    assert pattern.allowed("foo.table") == False
+    assert not pattern.allowed("foo.table")
 
 def test_single_table():
     pattern = AllowDenyPattern(allow=["foo.mytable"])
-    assert pattern.allowed("foo.mytable") == True
+    assert pattern.allowed("foo.mytable")
+
+def test_default_deny():
+    pattern = AllowDenyPattern(allow=["foo.mytable"])
+    assert not pattern.allowed("foo.bar")
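
The added test_default_deny pins down the implicit behavior that a non-empty allow list denies everything it does not match. For context, a minimal sketch of what these tests assume about AllowDenyPattern (the field and method names come from the tests; the internals are a guess):

import re
from typing import List
from pydantic import BaseModel

class AllowDenyPattern(BaseModel):
    # Regex lists: deny rules are checked first, then the value must match
    # at least one allow rule; the default allows everything.
    allow: List[str] = [".*"]
    deny: List[str] = []

    @classmethod
    def allow_all(cls) -> "AllowDenyPattern":
        return cls()

    def allowed(self, string: str) -> bool:
        if any(re.match(pattern, string) for pattern in self.deny):
            return False
        return any(re.match(pattern, string) for pattern in self.allow)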

View File

@@ -3,7 +3,7 @@ from gometa.ingestion.api.sink import WriteCallback, SinkReport
 import unittest
 from unittest.mock import patch, MagicMock
-from gometa.ingestion.api.common import RecordEnvelope, PipelineContext
+from gometa.ingestion.api.common import RecordEnvelope
 
 class KafkaSinkTest(unittest.TestCase):

View File

@@ -9,7 +9,7 @@ class PipelineTest(unittest.TestCase):
     @patch("gometa.ingestion.sink.console.ConsoleSink.close")
     def test_configure(self, mock_sink, mock_source):
         pipeline = Pipeline.create(
-            {"source": {"type": "kafka", "kafka": {"bootstrap": "localhost:9092"},}, "sink": {"type": "console"},}
+            {"source": {"type": "kafka", "kafka": {"bootstrap": "localhost:9092"}}, "sink": {"type": "console"}}
         )
         pipeline.run()
         mock_source.assert_called_once()