Skip to content

Commit 6029733

Browse files
committed
rename the python package for pypi
1 parent b62956b commit 6029733

File tree

12 files changed: +52 −46 lines changed

ConFIG/grad_operator.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,8 @@ def ConFIG_update_double(grad_1:torch.Tensor,grad_2:torch.Tensor,
3030
3131
Examples:
3232
```python
33-
from ConFIG.grad_operator import ConFIG_update_double
34-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
33+
from conflictfree.grad_operator import ConFIG_update_double
34+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
3535
optimizer=torch.Adam(network.parameters(),lr=1e-3)
3636
for input_i in dataset:
3737
grads=[] # we record gradients rather than losses
@@ -93,8 +93,8 @@ def ConFIG_update(
9393
9494
Examples:
9595
```python
96-
from ConFIG.grad_operator import ConFIG_update
97-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
96+
from conflictfree.grad_operator import ConFIG_update
97+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
9898
optimizer=torch.Adam(network.parameters(),lr=1e-3)
9999
for input_i in dataset:
100100
grads=[] # we record gradients rather than losses
@@ -191,8 +191,8 @@ class ConFIGOperator(GradientOperator):
191191
192192
Examples:
193193
```python
194-
from ConFIG.grad_operator import ConFIGOperator
195-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
194+
from conflictfree.grad_operator import ConFIGOperator
195+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
196196
optimizer=torch.Adam(network.parameters(),lr=1e-3)
197197
operator=ConFIGOperator() # initialize operator
198198
for input_i in dataset:

ConFIG/momentum_operator.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
from torch import Tensor
22
from torch.nn.modules import Module
3-
from ConFIG import Sequence
4-
from ConFIG.grad_operator import ConFIGOperator, GradientOperator
5-
from ConFIG.utils import Sequence
6-
from ConFIG.loss_recorder import LatestLossRecorder, LossRecorder
3+
from conflictfree import Sequence
4+
from conflictfree.grad_operator import ConFIGOperator, GradientOperator
5+
from conflictfree.utils import Sequence
6+
from conflictfree.loss_recorder import LatestLossRecorder, LossRecorder
77
from . import *
88
from .utils import *
99
from .loss_recorder import *
@@ -152,8 +152,8 @@ class PseudoMomentumOperator(MomentumOperator):
152152
153153
Examples:
154154
```python
155-
from ConFIG.momentum_operator import PseudoMomentumOperator
156-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
155+
from conflictfree.momentum_operator import PseudoMomentumOperator
156+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
157157
optimizer=torch.Adam(network.parameters(),lr=1e-3)
158158
operator=PseudoMomentumOperator(num_vector=len(loss_fns)) # initialize operator, the only difference here is we need to specify the number of gradient vectors.
159159
for input_i in dataset:

README.md

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -65,8 +65,10 @@ url={https://arxiv.org/abs/2408.11104},
6565

6666
## Installation
6767

68-
* Install through `pip`: `pip install git+https://github.com/tum-pbs/ConFIG`
69-
* Install locally: Download and run `pip install .` or `install.sh` in terminal.
68+
* Install through `pip`: `pip install conflictfree`
69+
* Install from repository online: `pip install git+https://github.com/tum-pbs/ConFIG`
70+
* Install from repository offline: Download the repository and run `pip install .` or `install.sh` in terminal.
71+
* Install from released wheel: Download the wheel and run `pip install conflictfree-x.x.x-py3-none-any.whl` in terminal.
7072

7173
## Usage
7274

@@ -88,8 +90,8 @@ for input_i in dataset:
8890
With `ConFIG`:
8991

9092
```python
91-
from ConFIG.grad_operator import ConFIG_update
92-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
93+
from conflictfree.grad_operator import ConFIG_update
94+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
9395
optimizer=torch.Adam(network.parameters(),lr=1e-3)
9496
for input_i in dataset:
9597
grads=[]

docs/examples/mtl_toy.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -316,7 +316,7 @@
316316
],
317317
"source": [
318318
"from tqdm import tqdm\n",
319-
"from ConFIG.grad_operator import ConFIG_update\n",
319+
"from conflictfree.grad_operator import ConFIG_update\n",
320320
"scale=[0.1,1]\n",
321321
"trajectories = [];losses = []\n",
322322
"for init in inits:\n",
@@ -397,7 +397,7 @@
397397
],
398398
"source": [
399399
"from tqdm import tqdm\n",
400-
"from ConFIG.momentum_operator import PseudoMomentumOperator\n",
400+
"from conflictfree.momentum_operator import PseudoMomentumOperator\n",
401401
"\n",
402402
"scale=[0.1,1]\n",
403403
"trajectories = [];losses = []\n",

docs/examples/pinn_burgers.ipynb

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,7 @@
207207
"outputs": [],
208208
"source": [
209209
"import matplotlib.pyplot as plt\n",
210-
"from ConFIG.utils import get_para_vector,apply_para_vector\n",
210+
"from conflictfree.utils import get_para_vector,apply_para_vector\n",
211211
"\n",
212212
"\n",
213213
"class Tester():\n",
@@ -364,8 +364,8 @@
364364
}
365365
],
366366
"source": [
367-
"from ConFIG.grad_operator import ConFIG_update\n",
368-
"from ConFIG.utils import get_gradient_vector,apply_gradient_vector\n",
367+
"from conflictfree.grad_operator import ConFIG_update\n",
368+
"from conflictfree.utils import get_gradient_vector,apply_gradient_vector\n",
369369
"\n",
370370
"torch.manual_seed(seed)\n",
371371
"np.random.seed(seed)\n",
@@ -434,8 +434,8 @@
434434
}
435435
],
436436
"source": [
437-
"from ConFIG.utils import OrderedSliceSelector\n",
438-
"from ConFIG.momentum_operator import PseudoMomentumOperator\n",
437+
"from conflictfree.utils import OrderedSliceSelector\n",
438+
"from conflictfree.momentum_operator import PseudoMomentumOperator\n",
439439
"\n",
440440
"torch.manual_seed(seed)\n",
441441
"np.random.seed(seed)\n",

docs/start/start.md

Lines changed: 10 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,10 @@
22

33
## Installation
44

5-
* Install through `pip`: `pip install git+https://github.com/tum-pbs/ConFIG`
6-
* Install locally: Download and run `pip install .` or `install.sh` in terminal.
5+
* Install through `pip`: `pip install conflictfree`
6+
* Install from repository online: `pip install git+https://github.com/tum-pbs/ConFIG`
7+
* Install from repository offline: Download the repository and run `pip install .` or `install.sh` in terminal.
8+
* Install from released wheel: Download the wheel and run `pip install conflictfree-x.x.x-py3-none-any.whl` in terminal.
79

810
## Use ConFIG method
911

@@ -23,8 +25,8 @@ for input_i in dataset:
2325
To use our ConFIG method, you can simply modify the code as
2426

2527
```python
26-
from ConFIG.grad_operator import ConFIG_update
27-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
28+
from conflictfree.grad_operator import ConFIG_update
29+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
2830
optimizer=torch.Adam(network.parameters(),lr=1e-3)
2931
for input_i in dataset:
3032
grads=[] # we record gradients rather than losses
@@ -41,8 +43,8 @@ for input_i in dataset:
4143
Or, you can use our `ConFIGOperator` class:
4244

4345
```python
44-
from ConFIG.grad_operator import ConFIGOperator
45-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
46+
from conflictfree.grad_operator import ConFIGOperator
47+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
4648
optimizer=torch.Adam(network.parameters(),lr=1e-3)
4749
operator=ConFIGOperator() # initialize operator
4850
for input_i in dataset:
@@ -64,8 +66,8 @@ The `ConFIGOperator` class and `ConFIG_update` is basically the same, you can ch
6466
The basic usage of `M-ConFIG` method in our code is similar to `ConFIGOperator` :
6567

6668
```python
67-
from ConFIG.momentum_operator import PseudoMomentumOperator
68-
from ConFIG.utils import get_gradient_vector,apply_gradient_vector
69+
from conflictfree.momentum_operator import PseudoMomentumOperator
70+
from conflictfree.utils import get_gradient_vector,apply_gradient_vector
6971
optimizer=torch.Adam(network.parameters(),lr=1e-3)
7072
operator=PseudoMomentumOperator(num_vector=len(loss_fns)) # initialize operator, the only difference here is we need to specify the number of gradient vectors.
7173
for input_i in dataset:

experiments/MTL/methods/weight_methods.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,9 @@
1111

1212
from methods.min_norm_solvers import MinNormSolver, gradient_normalizers
1313

14-
from ConFIG.grad_operator import *
15-
from ConFIG.momentum_operator import *
16-
from ConFIG.helpers import OrderedSliceSelector
14+
from conflictfree.grad_operator import *
15+
from conflictfree.momentum_operator import *
16+
from conflictfree.helpers import OrderedSliceSelector
1717

1818
EPS = 1e-8 # for numerical stability
1919

experiments/PINN/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ The following code shows how to start a training:
1212

1313
```python
1414
from lib_pinns.burgers.trainer import *
15-
from ConFIG.grad_operator import *
15+
from conflictfree.grad_operator import *
1616

1717
SAVE_PATH="./PINN_trained/burgers/"
1818

experiments/PINN/lib_pinns/trainer_basis.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@
55
from .helpers import *
66
from .loss_weighter import *
77

8-
from ConFIG.grad_operator import *
9-
from ConFIG.momentum_operator import *
8+
from conflictfree.grad_operator import *
9+
from conflictfree.momentum_operator import *
1010

1111
def get_cosine_constant_lambda(initial_lr,final_lr,epochs,warmup_epoch,constant_start_epoch):
1212
"""

install.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
pip uninstall ConFIG
1+
pip uninstall conflictfree
22
rm -rf build dist foxutils.egg-info
33
python3 setup.py sdist bdist_wheel
44
cd dist
5-
pip install ConFIG-*.whl
5+
pip install conflictfree-*.whl

Comments (0)