Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

change a_star to astar #80

Merged
merged 48 commits into from
Oct 5, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
5d82d97
change python tests/test_a_star/prepare_for_test.py to python pygmtoo…
heatingma Aug 1, 2023
4c05275
add bdist_wheel
heatingma Aug 1, 2023
4155230
delete test/test_a_star
heatingma Aug 1, 2023
b083a7b
add astar ori_files
heatingma Aug 1, 2023
678a49e
Create publish_req.txt
heatingma Aug 1, 2023
6cf8381
Delete a_star.tar.gz
heatingma Aug 1, 2023
3678388
version is a23
heatingma Aug 1, 2023
ef3f41c
Update setup.py
heatingma Aug 1, 2023
439c757
delete some notes
heatingma Aug 1, 2023
6e0ea0e
add alternate url to fix download problem
heatingma Aug 2, 2023
18d3873
add return
heatingma Aug 3, 2023
be38bf0
fix the md5 problem with genn_astar pretrained models
heatingma Aug 3, 2023
d07498e
add the missing ','
heatingma Aug 3, 2023
b5f3133
fix the "diff=200.0" problem
heatingma Aug 3, 2023
57230e8
swap the url and the url_alter
heatingma Aug 3, 2023
55379dc
change the astar_pretrain_path
heatingma Aug 3, 2023
bde8106
Revert "swap the url and the url_alter"
heatingma Aug 4, 2023
220f782
Revert "change the astar_pretrain_path"
heatingma Aug 4, 2023
3143592
change the pretrained path
heatingma Aug 4, 2023
61181f8
only small files have url_alter
heatingma Aug 4, 2023
0642a14
add new url for pretrained models
heatingma Aug 4, 2023
0eec2ee
add new download pretrained models' paths for jittor backend
heatingma Aug 4, 2023
52143ae
add new download pretrained models' paths for jittor backend
heatingma Aug 4, 2023
329a115
add new download pretrained models' paths for jittor backend
heatingma Aug 4, 2023
f3a78c0
add new download path for pytorch backend pretrained models
heatingma Aug 4, 2023
d8f821b
add new download path for paddle backend
heatingma Aug 4, 2023
c368697
delete some unused url
heatingma Aug 5, 2023
0ae85c7
add new alternate download path for cie and pca
heatingma Aug 5, 2023
35a4e02
don't test neural_solvers now
heatingma Aug 5, 2023
6f42e7c
only test neural_solvers
heatingma Aug 5, 2023
62f2bef
only test neural_solvers
heatingma Aug 5, 2023
9c76481
only test neural_solvers
heatingma Aug 5, 2023
a591e7e
only test neural
heatingma Aug 5, 2023
ceeaf60
add the forgotten ","
heatingma Aug 5, 2023
18c98a9
add all tests
heatingma Aug 5, 2023
f408a51
add new url_path and change the download func
heatingma Aug 6, 2023
7e4a9de
delete dropout, trust_fact and no_pred_size for astar
heatingma Aug 7, 2023
6e6f773
delete dropout for genn_astar
heatingma Aug 7, 2023
a3ef42e
delete some parameters for astar and genn_astar
heatingma Aug 7, 2023
86ab428
Merge branch 'Thinklab-SJTU:main' into main
heatingma Aug 7, 2023
ba4d408
python
heatingma Aug 8, 2023
6df2f3f
Merge branch 'Thinklab-SJTU:main' into main
heatingma Aug 8, 2023
66a4b05
Merge branch 'Thinklab-SJTU:main' into main
heatingma Aug 22, 2023
37f88af
Merge branch 'Thinklab-SJTU:main' into main
heatingma Sep 4, 2023
8616fb4
change a_star to astar
heatingma Sep 12, 2023
de06854
change the function name "astar" from cython to "c_astar"
heatingma Oct 5, 2023
b0c8c0d
fix: 'module' object is not callable
heatingma Oct 5, 2023
57b0c26
change astar to c_astar
heatingma Oct 5, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:
if [ "${{ matrix.python-version }}" != "3.10" ]; then pip install mindspore==1.10.0; fi
- name: generate astar.so
run: |
python pygmtools/astar/get_astar.py
python pygmtools/c_astar/get_c_astar.py
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
Expand Down Expand Up @@ -69,7 +69,7 @@ jobs:
pip install -r tests/requirements_win_mac.txt
- name: generate astar.so
run: |
python pygmtools/astar/get_astar.py
python pygmtools/c_astar/get_c_astar.py
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
Expand Down Expand Up @@ -101,7 +101,7 @@ jobs:
python -m pip install -r tests\requirements_win_mac.txt
- name: generate astar.pyd
run: |
python pygmtools/astar/get_astar.py
python pygmtools/c_astar/get_c_astar.py
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ cdef extern from "priority_queue.hpp":

@cython.boundscheck(False)
@cython.wraparound(False)
def a_star(
def c_astar(
data,
k,
vector[long] ns_1,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,12 @@
from Cython.Build import cythonize
import numpy as np
from glob import glob

setup(
name='a-star function',
name='c_astar function',
ext_modules=cythonize(
Extension(
'a_star',
'c_astar',
glob('*.pyx'),
include_dirs=[np.get_include(),"."],
extra_compile_args=["-std=c++11"],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@
import shutil

ori_dir = os.getcwd()
os.chdir('pygmtools/astar')
os.chdir('pygmtools/c_astar')

try:
os.system("python a_star_setup.py build_ext --inplace")
os.system("python c_astar_setup.py build_ext --inplace")
except:
os.system("python3 a_star_setup.py build_ext --inplace")
os.system("python3 c_astar_setup.py build_ext --inplace")

current_dir = os.getcwd()

Expand Down
8 changes: 4 additions & 4 deletions pygmtools/pytorch_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from .pytorch_astar_modules import GCNConv, AttentionModule, TensorNetworkModule, GraphPair, \
VERY_LARGE_INT, to_dense_adj, to_dense_batch, default_parameter, check_layer_parameter, node_metric
from torch import Tensor
from pygmtools.a_star import a_star
from pygmtools.c_astar import c_astar

#############################################
# Linear Assignment Problem Solvers #
Expand Down Expand Up @@ -930,10 +930,10 @@ def forward(self, data: GraphPair):
data.g1.nodes_num[i], data.g2.nodes_num[i])
num_nodes_1 = data.g1.nodes_num[i] + 1
num_nodes_2 = data.g2.nodes_num[i] + 1
x_pred[i][:num_nodes_1, :num_nodes_2] = self._a_star(cur_data)
x_pred[i][:num_nodes_1, :num_nodes_2] = self._astar(cur_data)
return x_pred[:, :-1, :-1]

def _a_star(self, data: GraphPair):
def _astar(self, data: GraphPair):

if self.args['cuda']:
device = "cuda" if torch.cuda.is_available() else "cpu"
Expand Down Expand Up @@ -995,7 +995,7 @@ def _a_star(self, data: GraphPair):

self.reset_cache()

x_pred, _ = a_star(
x_pred, _ = c_astar(
data, k, ns_1.cpu().numpy(), ns_2.cpu().numpy(),
self.net_prediction_cache,
self.heuristic_prediction_hun,
Expand Down
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def get_property(prop, project):

class BdistWheelCommand(_bdist_wheel):
def run(self):
os.system("python pygmtools/astar/get_astar.py")
os.system("python pygmtools/c_astar/get_c_astar.py")
super().run()

def get_tag(self):
Expand All @@ -74,7 +74,7 @@ def get_tag(self):
class InstallCommand(_install):
def run(self):
try:
os.system("python pygmtools/astar/get_astar.py")
os.system("python pygmtools/c_astar/get_c_astar.py")
except:
pass
_install.run(self)
Expand Down
2 changes: 1 addition & 1 deletion tests/test_classic_solvers.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ def _test_classic_solver_on_linear_assignment(num_nodes1, num_nodes2, node_feat_
last_X = pygm.utils.to_numpy(_X)


# The testing function for a_star
# The testing function for astar
def _test_astar(graph_num_nodes, node_feat_dim, solver_func, matrix_params, backends):
if backends[0] != 'pytorch':
backends.insert(0, 'pytorch') # force pytorch as the reference backend
Expand Down
Loading