
Commit

Delete unused variables
ylzz1997 committed Jun 27, 2023
1 parent d17df17 commit cd5492a
Showing 10 changed files with 2 additions and 21 deletions.
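
Every Python change in this commit deletes a bare statement whose result is computed and then immediately discarded (for example os.path.splitext(name_ext)[0] or x.dim()). As a purely illustrative aside, not part of the commit: such lines can be located with a small walk over the AST. The sketch below uses only the standard library; the script name and the heuristic are assumptions rather than anything taken from this repository, and linters such as ruff can report similar findings.

    # find_unused_exprs.py -- illustrative sketch, not part of this commit
    import ast
    import sys

    def report_discarded_expressions(path):
        """Print statements that compute a value and never use it."""
        with open(path, encoding="utf-8") as source:
            tree = ast.parse(source.read(), filename=path)
        for node in ast.walk(tree):
            # ast.Expr is an expression used as a statement; skipping constants
            # leaves docstrings alone. Calls may have side effects, so every hit
            # is a candidate for manual review, not an automatic deletion.
            if isinstance(node, ast.Expr) and not isinstance(node.value, ast.Constant):
                print(f"{path}:{node.lineno}: value computed but never used")

    if __name__ == "__main__":
        for filename in sys.argv[1:]:
            report_discarded_expressions(filename)
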
2 changes: 2 additions & 0 deletions .gitignore
@@ -156,6 +156,7 @@ filelists/test.txt
 filelists/train.txt
 filelists/val.txt
 .idea/
+.vscode/
 .idea/modules.xml
 .idea/so-vits-svc.iml
 .idea/vcs.xml
@@ -168,3 +169,4 @@ pretrain/vec-256-layer-9.onnx
 pretrain/vec-256-layer-12.onnx
 pretrain/vec-768-layer-9.onnx
 .vscode/launch.json
+.ruff.toml

4 changes: 0 additions & 4 deletions .ruff.toml

This file was deleted.

6 changes: 0 additions & 6 deletions .vscode/extensions.json

This file was deleted.

1 change: 0 additions & 1 deletion diffusion/data_loaders.py
@@ -131,7 +131,6 @@ def __init__(
         with open(filelists,"r") as f:
             self.paths = f.read().splitlines()
         for name_ext in tqdm(self.paths, total=len(self.paths)):
-            os.path.splitext(name_ext)[0]
             path_audio = name_ext
             duration = librosa.get_duration(filename = path_audio, sr = self.sample_rate)

3 changes: 0 additions & 3 deletions diffusion/diffusion_onnx.py
@@ -581,9 +581,6 @@ def forward(self, condition=None, init_noise=None, pndms=None, k_step=None):
         plms_noise_stage = torch.tensor(0, dtype=torch.long, device=device)
         noise_list = torch.zeros((0, 1, 1, self.mel_bins, n_frames), device=device)

-        ot = step_range[0]
-        torch.full((1,), ot, device=device, dtype=torch.long)
-
         for t in step_range:
             t_1 = torch.full((1,), t, device=device, dtype=torch.long)
             noise_pred = self.denoise_fn(x, t_1, cond)

1 change: 0 additions & 1 deletion diffusion/dpm_solver_pytorch.py
@@ -557,7 +557,6 @@ def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=Fal
             x_t: A pytorch tensor. The approximated solution at time `t`.
         """
         ns = self.noise_schedule
-        x.dim()
         lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t)
         h = lambda_t - lambda_s
         log_alpha_s, log_alpha_t = ns.marginal_log_mean_coeff(s), ns.marginal_log_mean_coeff(t)

1 change: 0 additions & 1 deletion diffusion/unit2mel.py
@@ -121,7 +121,6 @@ def init_spkmix(self, n_spk):
         hubert_hidden_size = self.input_channel
         n_frames = 10
         hubert = torch.randn((1, n_frames, hubert_hidden_size))
-        torch.arange(end=n_frames).unsqueeze(0).long()
         f0 = torch.randn((1, n_frames))
         volume = torch.randn((1, n_frames))
         spks = {}

1 change: 0 additions & 1 deletion onnx_export_speaker_mix.py
@@ -130,7 +130,6 @@ def main():
         "Characters": spklist
     }

-    json.dumps(MoeVSConf)
     with open(f"checkpoints/{path}.json", 'w') as MoeVsConfFile:
         json.dump(MoeVSConf, MoeVsConfFile, indent = 4)

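The json.dumps(MoeVSConf) call removed above serialized the dict to a string that was never used; the json.dump call on the next line already writes the configuration straight into the open file. A minimal sketch of the difference (the file name and dict contents here are illustrative, not taken from the exporter):

    import json

    conf = {"Folder": "model", "Name": "model"}   # illustrative values only

    text = json.dumps(conf, indent=4)             # returns a JSON string; pointless unless the string is used
    with open("example.json", "w") as f:          # illustrative output path
        json.dump(conf, f, indent=4)              # serializes directly into the file object
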
2 changes: 0 additions & 2 deletions vdecoder/hifigan/models.py
@@ -201,8 +201,6 @@ def forward(self, f0):
         output uv: tensor(batchsize=1, length, 1)
         """
         with torch.no_grad():
-            torch.zeros(f0.shape[0], f0.shape[1], self.dim,
-                        device=f0.device)
             # fundamental component
             fn = torch.multiply(f0, torch.FloatTensor([[range(1, self.harmonic_num + 2)]]).to(f0.device))

2 changes: 0 additions & 2 deletions vdecoder/hifiganwithsnake/models.py
@@ -214,8 +214,6 @@ def forward(self, f0):
         output uv: tensor(batchsize=1, length, 1)
         """
         with torch.no_grad():
-            torch.zeros(f0.shape[0], f0.shape[1], self.dim,
-                        device=f0.device)
             # fundamental component
             fn = torch.multiply(f0, torch.FloatTensor([[range(1, self.harmonic_num + 2)]]).to(f0.device))

