Remove unused imports

Harshal Sheth 2021-02-11 21:59:54 -08:00 committed by Shirshanka Das
parent 43d5fac494
commit 7ca018aaa4
24 changed files with 44 additions and 72 deletions

View File

@@ -21,12 +21,12 @@ jobs:
python -m pip install --upgrade pip
pip install -e .
pip install -r test_requirements.txt
- name: Check formatting with black
run: |
black --exclude 'gometa/metadata' -S -t py36 -l 120 src tests
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
#flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
flake8 src tests --count --statistics || true
- name: Check with mypy
run: |
mypy -p gometa || true

View File

@@ -1,6 +1,14 @@
[flake8]
max-line-length = 130
max-line-length = 120
max-complexity = 15
ignore = D203
exclude =
.git,
src/gometa/metadata,
__pycache__
per-file-ignores =
# imported but unused
__init__.py: F401
[mypy]
mypy_path = src
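
The per-file-ignores entry above exists because package __init__.py files import names purely to re-export them, which flake8 flags as F401 ("imported but unused"). A hypothetical illustration of the pattern being permitted (path and names invented, not from this commit):

# mypackage/__init__.py -- hypothetical example
# These imports exist only to re-export names at the package level.
# Without the per-file-ignores rule, flake8 reports each as F401,
# forcing a "# noqa: F401" comment on every line.
from .common import ConfigModel
from .yaml import YamlConfigurationMechanism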

View File

@@ -1,7 +1,6 @@
from abc import ABC, abstractmethod
from typing import TypeVar, Type, List, IO
from typing import List, IO
from pydantic import BaseModel, ValidationError
from pathlib import Path
from contextlib import contextmanager
import re

View File

@@ -1,5 +1,4 @@
from typing import Optional
from pydantic import BaseModel, Field, ValidationError, validator
from pydantic import BaseModel, validator
class _KafkaConnectionConfig(BaseModel):

View File

@@ -1,7 +1,7 @@
from typing import IO
import yaml
from .common import ConfigModel, ConfigurationMechanism
from gometa.configuration import ConfigurationMechanism
class YamlConfigurationMechanism(ConfigurationMechanism):
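
For context, a ConfigurationMechanism turns a config stream into a plain dict. A minimal sketch of the YAML variant consistent with the imports above (method name and body assumed, not copied from the repo):

from abc import ABC, abstractmethod
from typing import IO

import yaml

class ConfigurationMechanism(ABC):  # stub of the real base class, for illustration
    @abstractmethod
    def load_config(self, config_fp: IO) -> dict:
        ...

class YamlConfigurationMechanism(ConfigurationMechanism):
    # Sketch: parse a YAML stream into a dict for the pipeline to consume.
    def load_config(self, config_fp: IO) -> dict:
        return yaml.safe_load(config_fp)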

View File

@@ -6,7 +6,7 @@ import click
from gometa.configuration.common import ConfigurationMechanism, ConfigurationError, nicely_formatted_validation_errors
from gometa.configuration.yaml import YamlConfigurationMechanism
from gometa.configuration.toml import TomlConfigurationMechanism
from gometa.ingestion.run.pipeline import Pipeline, PipelineConfig
from gometa.ingestion.run.pipeline import Pipeline
logger = logging.getLogger(__name__)

View File

@@ -1,5 +1,3 @@
from abc import abstractmethod, ABCMeta
class Closeable:
def close(self):

View File

@@ -1,5 +1,5 @@
from dataclasses import dataclass
from typing import TypeVar, Generic, Optional
from typing import TypeVar, Generic
from abc import abstractmethod, ABCMeta
T = TypeVar('T')
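
The Generic import stays because T parameterizes typed containers in this module. A sketch of the pattern, using a RecordEnvelope-like wrapper as seen elsewhere in this commit (field names assumed):

from dataclasses import dataclass
from typing import Generic, TypeVar

T = TypeVar('T')

@dataclass
class RecordEnvelope(Generic[T]):
    # Hypothetical fields; the real class lives in gometa.ingestion.api.common.
    record: T
    metadata: dict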

View File

@@ -1,4 +1,3 @@
from abc import abstractmethod, ABCMeta
from dataclasses import dataclass
import json
import pprint

View File

@@ -1,5 +1,5 @@
from typing import Iterable
from gometa.ingestion.api.source import Extractor, WorkUnit
from gometa.ingestion.api.source import Extractor
from gometa.ingestion.api import RecordEnvelope
from gometa.ingestion.api.common import PipelineContext
from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent

View File

@@ -1,10 +1,8 @@
import logging
from typing import List, Dict, Any
from typing import List, Any
import avro.schema
from gometa.metadata.com.linkedin.pegasus2avro.schema import (
SchemaMetadata,
KafkaSchema,
SchemaField,
SchemaFieldDataType,
BooleanTypeClass,
@@ -14,7 +12,6 @@ from gometa.metadata.com.linkedin.pegasus2avro.schema import (
NumberTypeClass,
EnumTypeClass,
NullTypeClass,
MapTypeClass,
ArrayTypeClass,
UnionTypeClass,
RecordTypeClass,

View File

@@ -1,14 +1,9 @@
from typing import Dict
from pydantic import BaseModel
from dataclasses import dataclass, field
import pprint
from gometa.configuration.common import DynamicTypedConfig, ConfigModel
from gometa.ingestion.api.source import Source, Extractor
from gometa.ingestion.source import source_class_mapping
from gometa.ingestion.api.common import PipelineContext
from gometa.ingestion.api.sink import Sink, NoopWriteCallback, WriteCallback
from gometa.ingestion.api.sink import Sink, WriteCallback
from gometa.ingestion.sink import sink_class_mapping
from typing import Optional
import importlib
import time
import logging
@@ -77,7 +72,7 @@ class Pipeline:
def run(self):
callback = LoggingCallback()
extractor = self.extractor_class()
extractor: Extractor = self.extractor_class()
for wu in self.source.get_workunits():
# TODO: change extractor interface
extractor.configure({}, self.ctx)
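
The annotation added here gives mypy a concrete type for the dynamically resolved extractor, so calls on it are type-checked. The pattern, reduced to a self-contained sketch (stub class and lookup assumed):

from typing import Type

class Extractor:  # stub standing in for gometa.ingestion.api.source.Extractor
    def configure(self, config: dict, ctx: object) -> None:
        pass

# In the real Pipeline this class is resolved from the recipe's source type.
extractor_class: Type[Extractor] = Extractor
extractor: Extractor = extractor_class()  # explicit annotation keeps mypy informed
extractor.configure({}, None)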

View File

@@ -1,7 +1,5 @@
from dataclasses import dataclass, field
import json
from typing import Optional, TypeVar, Type
from pydantic import BaseModel, Field, ValidationError, validator
from dataclasses import dataclass
from pydantic import BaseModel
from gometa.ingestion.api.sink import Sink, WriteCallback, SinkReport
from gometa.ingestion.api.common import RecordEnvelope, WorkUnit, PipelineContext
from gometa.configuration.kafka import KafkaProducerConnectionConfig

View File

@@ -1,14 +1,10 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import Optional, TypeVar, Type, Dict
from pydantic import BaseModel, Field, ValidationError, validator
from enum import Enum
from pathlib import Path
from typing import Type, Dict
from pydantic import BaseModel
import requests
from requests.exceptions import HTTPError
from gometa.ingestion.api.sink import Sink, WriteCallback, SinkReport
from gometa.ingestion.api.common import RecordEnvelope, WorkUnit
import json
from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
from gometa.metadata import (
ChartSnapshotClass,
@@ -18,7 +14,7 @@ from gometa.metadata import (
DatasetSnapshotClass,
DataProcessSnapshotClass,
MLModelSnapshotClass,
MLFeatureSnapshotClass,
# MLFeatureSnapshotClass,
)
from collections import OrderedDict

View File

@@ -1,7 +1,6 @@
from gometa.ingestion.api.sink import Sink, WriteCallback, SinkReport
from gometa.ingestion.api.common import RecordEnvelope, PipelineContext, WorkUnit
from gometa.ingestion.api.common import RecordEnvelope, PipelineContext
from pydantic import BaseModel
import os
import pathlib
import logging
import json

View File

@@ -1,14 +1,12 @@
import logging
from gometa.configuration import ConfigModel
from gometa.configuration.kafka import KafkaConsumerConnectionConfig
from gometa.ingestion.api.source import Source, Extractor, SourceReport
from gometa.ingestion.api.source import WorkUnit
from typing import Optional, Iterable, List, Dict, Any
from gometa.ingestion.api.source import Source, SourceReport
from typing import Iterable, List, Dict, Any
from dataclasses import dataclass, field
import confluent_kafka
from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient
import re
from gometa.ingestion.api.closeable import Closeable
from gometa.ingestion.source.metadata_common import MetadataWorkUnit
import time

View File

@@ -1,7 +1,7 @@
import json
from dataclasses import dataclass, field
from pydantic import BaseModel
from typing import Optional, Iterable
from typing import Iterable
from gometa.ingestion.api.source import Source, SourceReport
from gometa.ingestion.source.metadata_common import MetadataWorkUnit
from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent

View File

@@ -3,7 +3,7 @@ from sqlalchemy import types
from sqlalchemy.engine import reflection
from gometa.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
from gometa.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
from gometa.metadata.com.linkedin.pegasus2avro.schema import SchemaMetadata, MySqlDDL
from gometa.metadata.com.linkedin.pegasus2avro.schema import SchemaMetadata, MySqlDDL, SchemaField, SchemaFieldDataType
from gometa.metadata.com.linkedin.pegasus2avro.common import AuditStamp
from gometa.ingestion.api.source import WorkUnit, Source, SourceReport
@@ -15,21 +15,13 @@ from typing import Optional, List, Any, Dict
from dataclasses import dataclass, field
from gometa.metadata.com.linkedin.pegasus2avro.schema import (
SchemaMetadata,
KafkaSchema,
SchemaField,
SchemaFieldDataType,
BooleanTypeClass,
FixedTypeClass,
StringTypeClass,
BytesTypeClass,
NumberTypeClass,
EnumTypeClass,
NullTypeClass,
MapTypeClass,
ArrayTypeClass,
UnionTypeClass,
RecordTypeClass,
)
logger = logging.getLogger(__name__)

View File

@@ -1,6 +1,4 @@
import os
import pytest
import subprocess
import mce_helpers

View File

@@ -1,11 +1,7 @@
import os
import pytest
import subprocess
import time
def test_ingest(sql_server, pytestconfig):
docker = "docker"
command = f"{docker} exec testsqlserver /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P 'test!Password' -d master -i /setup/setup.sql"

View File

@@ -1,8 +1,3 @@
import os
import pytest
import subprocess
import shutil
from gometa.ingestion.run.pipeline import Pipeline
import mce_helpers
@@ -19,8 +14,8 @@ def test_serde_large(pytestconfig, tmp_path):
pipeline = Pipeline.create(
{
'source': {'type': 'file', 'file': {'filename': str(golden_file),},},
'sink': {'type': 'file', 'file': {'filename': str(output_file),},},
'source': {'type': 'file', 'file': {'filename': str(golden_file)}},
'sink': {'type': 'file', 'file': {'filename': str(output_file)}},
}
)
pipeline.run()
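
The cleaned-up dicts above are the recipe shape Pipeline.create() accepts. A hypothetical standalone equivalent (file paths are placeholders, not the test's real fixtures):

from gometa.ingestion.run.pipeline import Pipeline

pipeline = Pipeline.create(
    {
        'source': {'type': 'file', 'file': {'filename': 'input_mces.json'}},
        'sink': {'type': 'file', 'file': {'filename': 'output_mces.json'}},
    }
)
pipeline.run()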

View File

@@ -3,14 +3,19 @@ from gometa.configuration.common import AllowDenyPattern
def test_allow_all():
pattern = AllowDenyPattern.allow_all()
assert pattern.allowed("foo.table") == True
assert pattern.allowed("foo.table")
def test_deny_all():
pattern = AllowDenyPattern(allow=[], deny=[".*"])
assert pattern.allowed("foo.table") == False
assert not pattern.allowed("foo.table")
def test_single_table():
pattern = AllowDenyPattern(allow=["foo.mytable"])
assert pattern.allowed("foo.mytable") == True
assert pattern.allowed("foo.mytable")
def test_default_deny():
pattern = AllowDenyPattern(allow=["foo.mytable"])
assert not pattern.allowed("foo.bar")
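
For reference, the behavior these tests pin down can be implemented with two regex lists where deny wins over allow. A minimal sketch (assumed, not the project's actual code):

import re
from typing import List

from pydantic import BaseModel

class AllowDenyPattern(BaseModel):
    allow: List[str] = [".*"]
    deny: List[str] = []

    @classmethod
    def allow_all(cls) -> "AllowDenyPattern":
        return cls()

    def allowed(self, string: str) -> bool:
        # Deny patterns take precedence; otherwise any allow match passes.
        if any(re.match(p, string) for p in self.deny):
            return False
        return any(re.match(p, string) for p in self.allow)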

View File

@@ -3,7 +3,7 @@ from gometa.ingestion.api.sink import WriteCallback, SinkReport
import unittest
from unittest.mock import patch, MagicMock
from gometa.ingestion.api.common import RecordEnvelope, PipelineContext
from gometa.ingestion.api.common import RecordEnvelope
class KafkaSinkTest(unittest.TestCase):

View File

@@ -9,7 +9,7 @@ class PipelineTest(unittest.TestCase):
@patch("gometa.ingestion.sink.console.ConsoleSink.close")
def test_configure(self, mock_sink, mock_source):
pipeline = Pipeline.create(
{"source": {"type": "kafka", "kafka": {"bootstrap": "localhost:9092"},}, "sink": {"type": "console"},}
{"source": {"type": "kafka", "kafka": {"bootstrap": "localhost:9092"}}, "sink": {"type": "console"}}
)
pipeline.run()
mock_source.assert_called_once()