commit 91f76f9fec

pkgs/development/python-modules/coqpit/default.nix (new file, 37 lines added)

@@ -0,0 +1,37 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, pytestCheckHook
+}:
+
+buildPythonPackage rec {
+  pname = "coqpit";
+  version = "0.0.6.6";
+  format = "setuptools";
+
+  src = fetchFromGitHub {
+    owner = "coqui-ai";
+    repo = pname;
+    rev = "v${version}";
+    sha256 = "0wb5wf84i5h4ycm732kn4316v7schhm91s2rrklfw9sny5dqmdnh";
+  };
+
+  checkInputs = [
+    pytestCheckHook
+  ];
+
+  pythonImportsCheck = [
+    "coqpit"
+    "coqpit.coqpit"
+  ];
+
+  meta = with lib; {
+    description = "Simple but maybe too simple config management through python data classes";
+    longDescription = ''
+      Simple, light-weight and no dependency config handling through python data classes with to/from JSON serialization/deserialization.
+    '';
+    homepage = "https://github.com/coqui-ai/coqpit";
+    license = licenses.mit;
+    maintainers = with maintainers; [ hexa mic92 ];
+  };
+}
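
Note (not part of the commit): to poke at the new module interactively, a minimal throwaway shell expression is enough. This is a hedged sketch; the file name shell.nix is arbitrary, and it assumes the python-packages.nix hunk at the end of this commit is present in the nixpkgs that gets imported, so the module resolves as python3Packages.coqpit (ps.coqpit below).

# shell.nix, hypothetical, for local testing only
{ pkgs ? import <nixpkgs> { } }:

pkgs.mkShell {
  buildInputs = [
    # A Python interpreter with the freshly packaged coqpit on its path
    (pkgs.python3.withPackages (ps: [ ps.coqpit ]))
  ];
}

Running nix-shell with this file should then give an environment where `python3 -c 'import coqpit'` succeeds, mirroring the pythonImportsCheck above.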
@@ -65,6 +65,6 @@ buildPythonApplication rec {
     homepage = "https://github.com/bootphon/phonemizer";
     description = "Simple text to phones converter for multiple languages";
     license = licenses.gpl3;
-    maintainers = with maintainers; [ hexa ];
+    maintainers = with maintainers; [ ];
   };
 }
@@ -42,6 +42,6 @@ buildPythonPackage rec {
     description = "Unicode Standard tokenization routines and orthography profile segmentation";
     homepage = "https://github.com/cldf/segments";
     license = licenses.asl20;
-    maintainers = with maintainers; [ hexa ];
+    maintainers = with maintainers; [ ];
   };
 }
@@ -12,25 +12,26 @@
 #
 # If you upgrade from an old version you may have to delete old models from ~/.local/share/tts
 # Also note that your tts version might not support all available models so check:
-# https://github.com/coqui-ai/TTS/releases/tag/v0.0.13
+# https://github.com/coqui-ai/TTS/releases/tag/v0.0.14
 #
 # For now, for deployment check the systemd unit in the pull request:
 # https://github.com/NixOS/nixpkgs/pull/103851#issue-521121136
 
 python3Packages.buildPythonApplication rec {
   pname = "tts";
-  version = "0.0.13";
+  version = "0.0.14";
 
   src = fetchFromGitHub {
     owner = "coqui-ai";
     repo = "TTS";
     rev = "v${version}";
-    sha256 = "1sh7sjkh7ihbkqc7sl4hnzci0n7gv4s140dykpb1havaqyfhjn8l";
+    sha256 = "0cl0ri90mx0y19fmqww73lp5nv6qkpc45rm4157i7p6q6llajdhp";
   };
 
-  preBuild = ''
+  postPatch = ''
     sed -i -e 's!librosa==[^"]*!librosa!' requirements.txt
     sed -i -e 's!unidecode==[^"]*!unidecode!' requirements.txt
+    sed -i -e 's!numba==[^"]*!numba!' requirements.txt
     sed -i -e 's!numpy==[^"]*!numpy!' requirements.txt
     sed -i -e 's!umap-learn==[^"]*!umap-learn!' requirements.txt
   '';
@@ -40,14 +41,15 @@ python3Packages.buildPythonApplication rec {
   ];
 
   propagatedBuildInputs = with python3Packages; [
+    coqpit
     flask
     gdown
     inflect
     jieba
     librosa
     matplotlib
+    numba
     pandas
-    phonemizer
     pypinyin
     pysbd
     pytorch
@@ -69,6 +71,7 @@ python3Packages.buildPythonApplication rec {
   '';
 
   checkInputs = with python3Packages; [
+    pytest-sugar
     pytestCheckHook
   ];
 
@@ -77,10 +80,6 @@ python3Packages.buildPythonApplication rec {
     "test_torch_stft"
     "test_stft_loss"
     "test_multiscale_stft_loss"
-    # assert tensor(1.1904e-07, dtype=torch.float64) <= 0
-    "test_parametrized_gan_dataset"
-    # RuntimeError: expected scalar type Double but found Float
-    "test_speaker_embedding"
     # Requires network acccess to download models
     "test_synthesize"
   ];
@@ -92,13 +91,25 @@ python3Packages.buildPythonApplication rec {
 
     # numba tries to write to HOME directory
     export HOME=$TMPDIR
+
+    for file in $(grep -rl 'python TTS/bin' tests); do
+      substituteInPlace "$file" \
+        --replace "python TTS/bin" "${python3.interpreter} $out/lib/${python3.libPrefix}/site-packages/TTS/bin"
+    done
   '';
 
   disabledTestPaths = [
     # requires tensorflow
     "tests/test_tacotron2_tf_model.py"
-    "tests/test_vocoder_tf_melgan_generator.py"
-    "tests/test_vocoder_tf_pqmf.py"
+    "tests/vocoder_tests/test_vocoder_tf_pqmf.py"
+    "tests/vocoder_tests/test_vocoder_tf_melgan_generator.py"
+    # RuntimeError: fft: ATen not compiled with MKL support
+    "tests/vocoder_tests/test_fullband_melgan_train.py"
+    "tests/vocoder_tests/test_hifigan_train.py"
+    "tests/vocoder_tests/test_melgan_train.py"
+    "tests/vocoder_tests/test_multiband_melgan_train.py"
+    "tests/vocoder_tests/test_parallel_wavegan_train.py"
+
   ];
 
   meta = with lib; {
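
Note (not part of the commit): the header comment above warns that upgrading may require deleting old models from ~/.local/share/tts, and that a given tts version only supports certain models. A hedged sketch of how a user could pin the previous release in an overlay until they are ready to migrate; the top-level attribute name tts and the overrideAttrs pattern are assumptions, and the 0.0.14 expression's postPatch and test exclusions may not apply cleanly to the older source:

# overlay.nix, hypothetical
self: super: {
  tts = super.tts.overrideAttrs (old: rec {
    version = "0.0.13";
    src = super.fetchFromGitHub {
      owner = "coqui-ai";
      repo = "TTS";
      rev = "v${version}";
      # hash of the v0.0.13 tag, taken from the removed line in the hunk above
      sha256 = "1sh7sjkh7ihbkqc7sl4hnzci0n7gv4s140dykpb1havaqyfhjn8l";
    };
  });
}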
@@ -1283,6 +1283,8 @@ in {
 
   connect-box = callPackage ../development/python-modules/connect_box { };
 
+  coqpit = callPackage ../development/python-modules/coqpit { };
+
   cerberus = callPackage ../development/python-modules/cerberus { };
 
   cert-chain-resolver = callPackage ../development/python-modules/cert-chain-resolver { };