Harshal Sheth c7892ada4c Codegen avro + datahub kafka sink (#3)
* Add codegen

* New architecture + setup file -> console pipeline

* Cleanup source loader

* Basic Kafka metadata source

* Kafka source and extractor

* Add kwargs construct interface

* Fix kafka source unit test

* start working on pipeline test

* kafka datahub sink

* Make myself a profile

* Ingest to datahub from kafka

* Update codegen

* Add restli transport

* Fix bug in restli conversion
2021-02-15 18:29:27 -08:00


import json

import click
from avrogen import write_schema_files


@click.command()
@click.argument("schema_file", type=click.Path(exists=True))
@click.argument("outdir", type=click.Path())
def generate(schema_file: str, outdir: str):
    with open(schema_file) as f:
        raw_schema_text = f.read()

    # Re-serialize the schema with json.dumps' default separators so the
    # literal replacement below matches, then swap the Java-specific string
    # annotation for a plain Avro "string".
    no_spaces_schema = json.dumps(json.loads(raw_schema_text))
    schema_json = no_spaces_schema.replace(
        '{"type": "string", "avro.java.string": "String"}', '"string"'
    )

    # Pretty-print the cleaned schema and generate Python classes from it.
    redo_spaces = json.dumps(json.loads(schema_json), indent=2)
    write_schema_files(redo_spaces, outdir)


if __name__ == "__main__":
    generate()
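
For reference, a minimal sketch of what the string-type normalization above does, applied to a toy field definition. The schema content below is illustrative only (not taken from the repository), and the sketch assumes json.dumps' default separators, which is what makes the literal replacement match.

import json

# Toy field using Avro's Java string annotation (hypothetical example).
toy_field = {"name": "urn", "type": {"type": "string", "avro.java.string": "String"}}

canonical = json.dumps(toy_field)  # default separators: ", " and ": "
simplified = canonical.replace(
    '{"type": "string", "avro.java.string": "String"}', '"string"'
)
print(json.loads(simplified))  # {'name': 'urn', 'type': 'string'}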