Gleb Vinarskis committed on
Commit
d521794
·
1 Parent(s): fe41f06

reverting changes

Browse files
__init__.py DELETED
@@ -1,4 +0,0 @@
1
- from .modeling_custom import CustomConfig, CustomModel
2
- from .custom_pipeline import Pipeline_One
3
-
4
- __all__ = ["CustomConfig", "CustomModel", "Pipeline_One"]
 
 
 
 
 
config.json CHANGED
@@ -1,9 +1,9 @@
1
  {
2
- "_name_or_path": "Maslionok/pipeline1",
3
  "architectures": [
4
- "CustomModel"
5
  ],
6
- "model_type": "langident",
7
  "num_labels": 3,
8
  "id2label": {
9
  "0": "English",
@@ -17,9 +17,9 @@
17
  },
18
  "custom_pipelines": {
19
  "language-detection": {
20
- "impl": "custom_pipeline.Pipeline_One",
21
  "pt": [],
22
  "tf": []
23
  }
24
  }
25
- }
 
1
  {
2
+ "_name_or_path": "Maslionok/pipeline1/LID-40-3-2000000-1-4.bin",
3
  "architectures": [
4
+ "Pipeline_One"
5
  ],
6
+ "model_type": "qwen2",
7
  "num_labels": 3,
8
  "id2label": {
9
  "0": "English",
 
17
  },
18
  "custom_pipelines": {
19
  "language-detection": {
20
+ "impl": "impresso_langident_wrapper/Pipeline_One",
21
  "pt": [],
22
  "tf": []
23
  }
24
  }
25
+ }
configuration_custom.py DELETED
@@ -1,23 +0,0 @@
1
- from transformers import PretrainedConfig
2
-
3
- class CustomConfig(PretrainedConfig):
4
- model_type = "custom"
5
-
6
- def __init__(
7
- self,
8
- num_labels=3,
9
- id2label=None,
10
- label2id=None,
11
- **kwargs
12
- ):
13
- if id2label is None:
14
- id2label = {0: "English", 1: "German", 2: "French"}
15
- if label2id is None:
16
- label2id = {"English": 0, "German": 1, "French": 2}
17
-
18
- super().__init__(
19
- num_labels=num_labels,
20
- id2label=id2label,
21
- label2id=label2id,
22
- **kwargs
23
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
impresso_langident_wrapper.py CHANGED
@@ -1,30 +1,27 @@
1
  from transformers import Pipeline
2
  from transformers.pipelines import PIPELINE_REGISTRY
3
- from typing import Dict, List, Union
 
4
 
5
  class Pipeline_One(Pipeline):
6
- def __init__(self, model=None, tokenizer=None, **kwargs):
7
- super().__init__(model=model, tokenizer=tokenizer, **kwargs)
8
-
9
  def _sanitize_parameters(self, **kwargs):
10
- preprocess_kwargs = {}
11
- forward_kwargs = {}
12
- postprocess_kwargs = {}
13
- return preprocess_kwargs, forward_kwargs, postprocess_kwargs
14
 
15
- def preprocess(self, inputs, **kwargs) -> Dict[str, Union[str, List[str]]]:
16
- return {"text": inputs if isinstance(inputs, list) else [inputs]}
17
 
18
  def _forward(self, inputs):
19
- # Implement your model logic here
20
- return ["en"] # placeholder for actual model prediction
21
 
22
- def postprocess(self, model_outputs):
23
- return model_outputs[0] if isinstance(model_outputs, list) else model_outputs
 
24
 
25
  # Register the pipeline
26
  PIPELINE_REGISTRY.register_pipeline(
27
  "language-detection",
28
  pipeline_class=Pipeline_One,
29
- default={"model": None}
30
- )
 
1
  from transformers import Pipeline
2
  from transformers.pipelines import PIPELINE_REGISTRY
3
+
4
+
5
 
6
  class Pipeline_One(Pipeline):
 
 
 
7
  def _sanitize_parameters(self, **kwargs):
8
+ # Add any additional parameter handling if necessary
9
+ return kwargs, {}, {}
 
 
10
 
11
+ def preprocess(self, text, **kwargs):
12
+ return text
13
 
14
  def _forward(self, inputs):
15
+ model_output = self.model.predict(inputs, k=1)
16
+ return model_output
17
 
18
+ def postprocess(self, outputs, **kwargs):
19
+ return outputs
20
+
21
 
22
  # Register the pipeline
23
  PIPELINE_REGISTRY.register_pipeline(
24
  "language-detection",
25
  pipeline_class=Pipeline_One,
26
+ )
27
+
modeling_custom.py DELETED
@@ -1,18 +0,0 @@
1
- from transformers import PreTrainedModel
2
- from transformers.configuration_utils import PretrainedConfig
3
-
4
- class CustomConfig(PretrainedConfig):
5
- model_type = "langident" # Changed from "custom" to a specific type
6
-
7
- def __init__(self, **kwargs):
8
- super().__init__(**kwargs)
9
-
10
- class CustomModel(PreTrainedModel):
11
- config_class = CustomConfig
12
-
13
- def __init__(self, config):
14
- super().__init__(config)
15
-
16
- def forward(self, **kwargs):
17
- # Implement your forward pass
18
- pass