Bumping to 1.0.0b1 #243

Merged
merged 14 commits on Oct 27, 2021
26 changes: 0 additions & 26 deletions .bumpversion-dbt.cfg

This file was deleted.

3 changes: 1 addition & 2 deletions .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.21.0
current_version = 1.0.0b1
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)
@@ -27,4 +27,3 @@ first_value = 1
first_value = 1

[bumpversion:file:dbt/adapters/spark/__version__.py]

2 changes: 1 addition & 1 deletion dbt/adapters/spark/__version__.py
@@ -1 +1 @@
version = "0.21.0"
version = "1.0.0b1"
7 changes: 6 additions & 1 deletion dev_requirements.txt
@@ -1,3 +1,7 @@
# install latest changes in dbt-core
# TODO: how to automate switching from develop to version branches?
git+https://github.com/dbt-labs/dbt.git#egg=dbt-core&subdirectory=core

freezegun==0.3.9
pytest==6.0.2
mock>=1.3.0
@@ -11,6 +15,7 @@ flaky>=3.5.3,<4
pytest-csv

# Test requirements
pytest-dbt-adapter==0.5.1
#pytest-dbt-adapter==0.5.1
git+https://github.com/dbt-labs/dbt-adapter-tests.git#egg=pytest-dbt-adapter
sasl==0.2.1
thrift_sasl==0.4.1
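
The TODO in the new dev_requirements.txt lines asks how to automate switching from dbt-core's develop branch to a version branch. pip's VCS-URL syntax accepts a git ref after "@", so a release branch could be pinned with a line like the sketch below; the branch name 1.0.latest is only illustrative, not part of this PR.

git+https://github.com/dbt-labs/dbt.git@1.0.latest#egg=dbt-core&subdirectory=core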
1 change: 0 additions & 1 deletion requirements.txt
@@ -1,4 +1,3 @@
dbt-core==0.21.0
PyHive[hive]>=0.6.0,<0.7.0
pyodbc>=4.0.30
sqlparams>=3.0.0
68 changes: 46 additions & 22 deletions setup.py
@@ -1,41 +1,65 @@
#!/usr/bin/env python
from setuptools import find_namespace_packages, setup
import os
import sys
import re

# require python 3.6 or newer
if sys.version_info < (3, 6):
print('Error: dbt does not support this version of Python.')
print('Please upgrade to Python 3.6 or higher.')
sys.exit(1)


# require version of setuptools that supports find_namespace_packages
from setuptools import setup
try:
from setuptools import find_namespace_packages
except ImportError:
# the user has a downlevel version of setuptools.
print('Error: dbt requires setuptools v40.1.0 or higher.')
print('Please upgrade setuptools with "pip install --upgrade setuptools" '
'and try again')
sys.exit(1)


# pull long description from README
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, 'README.md'), 'r', encoding='utf8') as f:
long_description = f.read()


package_name = "dbt-spark"


# get this from a separate file
def _dbt_spark_version():
# get this package's version from dbt/adapters/<name>/__version__.py
def _get_plugin_version_dict():
_version_path = os.path.join(
this_directory, 'dbt', 'adapters', 'spark', '__version__.py'
)
_version_pattern = r'''version\s*=\s*["'](.+)["']'''
_semver = r'''(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'''
_pre = r'''((?P<prekind>a|b|rc)(?P<pre>\d+))?'''
_version_pattern = fr'''version\s*=\s*["']{_semver}{_pre}["']'''
with open(_version_path) as f:
match = re.search(_version_pattern, f.read().strip())
if match is None:
raise ValueError(f'invalid version at {_version_path}')
return match.group(1)
return match.groupdict()


package_version = _dbt_spark_version()
description = """The SparkSQL plugin for dbt (data build tool)"""
def _get_plugin_version():
parts = _get_plugin_version_dict()
return "{major}.{minor}.{patch}{prekind}{pre}".format(**parts)

dbt_version = '0.21.0'
# the package version should be the dbt version, with maybe some things on the
# ends of it. (0.21.0 vs 0.21.0a1, 0.21.0.1, ...)
if not package_version.startswith(dbt_version):
raise ValueError(
f'Invalid setup.py: package_version={package_version} must start with '
f'dbt_version={dbt_version}'
)

# require a compatible minor version (~=), prerelease if this is a prerelease
def _get_dbt_core_version():
parts = _get_plugin_version_dict()
minor = "{major}.{minor}.0".format(**parts)
pre = (parts["prekind"]+"1" if parts["prekind"] else "")
return f"{minor}{pre}"


package_name = "dbt-spark"
package_version = _get_plugin_version()
dbt_core_version = _get_dbt_core_version()
description = """The Apache Spark adapter plugin for dbt"""

odbc_extras = ['pyodbc>=4.0.30']
pyhive_extras = [
@@ -52,14 +76,14 @@ def _dbt_spark_version():
long_description=long_description,
long_description_content_type='text/markdown',

author='Fishtown Analytics',
author_email='info@fishtownanalytics.com',
url='https://github.com/fishtown-analytics/dbt-spark',
author='dbt Labs',
author_email='info@dbtlabs.com',
url='https://github.com/dbt-labs/dbt-spark',

packages=find_namespace_packages(include=['dbt', 'dbt.*']),
include_package_data=True,
install_requires=[
f'dbt-core=={dbt_version}',
'dbt-core~={}'.format(dbt_core_version),
'sqlparams>=3.0.0',
],
extras_require={
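
To show how the new version helpers in setup.py fit together, here is a minimal standalone sketch (not part of this PR) that applies the same regex and format strings to the literal string version = "1.0.0b1" and derives the dbt-core pin:

import re

# Same patterns and format strings as the new _get_plugin_version_dict(),
# _get_plugin_version(), and _get_dbt_core_version() in setup.py.
_semver = r'''(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'''
_pre = r'''((?P<prekind>a|b|rc)(?P<pre>\d+))?'''
_version_pattern = fr'''version\s*=\s*["']{_semver}{_pre}["']'''

# Stand-in for reading dbt/adapters/spark/__version__.py from disk.
parts = re.search(_version_pattern, 'version = "1.0.0b1"').groupdict()

package_version = "{major}.{minor}.{patch}{prekind}{pre}".format(**parts)  # "1.0.0b1"
minor = "{major}.{minor}.0".format(**parts)                                # "1.0.0"
pre = parts["prekind"] + "1" if parts["prekind"] else ""                   # "b1"
dbt_core_version = f"{minor}{pre}"                                         # "1.0.0b1"

print(f"dbt-core~={dbt_core_version}")  # prints: dbt-core~=1.0.0b1

For this beta, both the plugin version and the computed dbt-core requirement resolve to 1.0.0b1, so install_requires becomes the compatible-release pin dbt-core~=1.0.0b1 instead of the exact dbt-core==0.21.0 pin used before (and now dropped from requirements.txt).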