|
467 | 467 | (FunnelConfig, FunnelForQuestionAnswering),
|
468 | 468 | (LxmertConfig, LxmertForQuestionAnswering),
|
469 | 469 | (MPNetConfig, MPNetForQuestionAnswering),
|
| 470 | + ] |
| 471 | +) |
| 472 | + |
| 473 | +MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING = OrderedDict( |
| 474 | + [ |
| 475 | + # Model for Table Question Answering mapping |
470 | 476 | (TapasConfig, TapasForQuestionAnswering),
|
471 | 477 | ]
|
472 | 478 | )
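
For context, the auto classes dispatch on the type of the configuration object: this new mapping ties ``TapasConfig`` to ``TapasForQuestionAnswering``, and the ``from_config``/``from_pretrained`` methods added further down in this diff simply look the model class up by config type. A minimal sketch of that lookup inside this module (where ``TapasConfig`` is already imported), assuming a default TAPAS config:

    >>> config = TapasConfig()
    >>> model_cls = MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING[type(config)]
    >>> model_cls.__name__
    'TapasForQuestionAnswering'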
|
@@ -1384,6 +1390,106 @@ def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
|
1384 | 1390 | )
|
1385 | 1391 |
|
1386 | 1392 |
|
| 1393 | +class AutoModelForTableQuestionAnswering: |
| 1394 | + r""" |
| 1395 | + This is a generic model class that will be instantiated as one of the model classes of the library---with a table |
| 1396 | + question answering head---when created with the |
| 1397 | + :meth:`~transformers.AutoModelForTableQuestionAnswering.from_pretrained` class method or the |
| 1398 | + :meth:`~transformers.AutoModelForTableQuestionAnswering.from_config` class method. |
| 1399 | + |
| 1400 | + This class cannot be instantiated directly using ``__init__()`` (throws an error). |
| 1401 | + """ |
| 1402 | + |
| 1403 | + def __init__(self): |
| 1404 | + raise EnvironmentError( |
| 1405 | + "AutoModelForTableQuestionAnswering is designed to be instantiated " |
| 1406 | + "using the `AutoModelForTableQuestionAnswering.from_pretrained(pretrained_model_name_or_path)` or " |
| 1407 | + "`AutoModelForTableQuestionAnswering.from_config(config)` methods." |
| 1408 | + ) |
| 1409 | + |
| 1410 | + @classmethod |
| 1411 | + @replace_list_option_in_docstrings(MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING, use_model_types=False) |
| 1412 | + def from_config(cls, config): |
| 1413 | + r""" |
| 1414 | + Instantiates one of the model classes of the library---with a table question answering head---from a |
| 1415 | + configuration. |
| 1416 | + |
| 1417 | + Note: |
| 1418 | + Loading a model from its configuration file does **not** load the model weights. It only affects the |
| 1419 | + model's configuration. Use :meth:`~transformers.AutoModelForTableQuestionAnswering.from_pretrained` to load |
| 1420 | + the model weights. |
| 1421 | + |
| 1422 | + Args: |
| 1423 | + config (:class:`~transformers.PretrainedConfig`): |
| 1424 | + The model class to instantiate is selected based on the configuration class: |
| 1425 | + |
| 1426 | + List options |
| 1427 | + |
| 1428 | + Examples:: |
| 1429 | + |
| 1430 | + >>> from transformers import AutoConfig, AutoModelForTableQuestionAnswering |
| 1431 | + >>> # Download configuration from huggingface.co and cache. |
| 1432 | + >>> config = AutoConfig.from_pretrained('google/tapas-base-finetuned-wtq') |
| 1433 | + >>> model = AutoModelForTableQuestionAnswering.from_config(config) |
| 1434 | + """ |
| 1435 | + if type(config) in MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING.keys(): |
| 1436 | + return MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING[type(config)](config) |
| 1437 | + |
| 1438 | + raise ValueError( |
| 1439 | + "Unrecognized configuration class {} for this kind of AutoModel: {}.\n" |
| 1440 | + "Model type should be one of {}.".format( |
| 1441 | + config.__class__, |
| 1442 | + cls.__name__, |
| 1443 | + ", ".join(c.__name__ for c in MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING.keys()), |
| 1444 | + ) |
| 1445 | + ) |
| 1446 | + |
| 1447 | + @classmethod |
| 1448 | + @replace_list_option_in_docstrings(MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING) |
| 1449 | + @add_start_docstrings( |
| 1450 | + "Instantiate one of the model classes of the library---with a table question answering head---from a " |
| 1451 | + "pretrained model.", |
| 1452 | + AUTO_MODEL_PRETRAINED_DOCSTRING, |
| 1453 | + ) |
| 1454 | + def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): |
| 1455 | + r""" |
| 1456 | + Examples:: |
| 1457 | + |
| 1458 | + >>> from transformers import AutoConfig, AutoModelForTableQuestionAnswering |
| 1459 | + |
| 1460 | + >>> # Download model and configuration from huggingface.co and cache. |
| 1461 | + >>> model = AutoModelForTableQuestionAnswering.from_pretrained('google/tapas-base-finetuned-wtq') |
| 1462 | + |
| 1463 | + >>> # Update configuration during loading |
| 1464 | + >>> model = AutoModelForTableQuestionAnswering.from_pretrained('google/tapas-base-finetuned-wtq', output_attentions=True) |
| 1465 | + >>> model.config.output_attentions |
| 1466 | + True |
| 1467 | + |
| 1468 | + >>> # Loading from a TF checkpoint file instead of a PyTorch model (slower) |
| 1469 | + >>> config = AutoConfig.from_json_file('./tf_model/tapas_tf_checkpoint.json') |
| 1470 | + >>> model = AutoModelForTableQuestionAnswering.from_pretrained('./tf_model/tapas_tf_checkpoint.ckpt.index', from_tf=True, config=config) |
| 1471 | + """ |
| 1472 | + config = kwargs.pop("config", None) |
| 1473 | + if not isinstance(config, PretrainedConfig): |
| 1474 | + config, kwargs = AutoConfig.from_pretrained( |
| 1475 | + pretrained_model_name_or_path, return_unused_kwargs=True, **kwargs |
| 1476 | + ) |
| 1477 | + |
| 1478 | + if type(config) in MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING.keys(): |
| 1479 | + return MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING[type(config)].from_pretrained( |
| 1480 | + pretrained_model_name_or_path, *model_args, config=config, **kwargs |
| 1481 | + ) |
| 1482 | + |
| 1483 | + raise ValueError( |
| 1484 | + "Unrecognized configuration class {} for this kind of AutoModel: {}.\n" |
| 1485 | + "Model type should be one of {}.".format( |
| 1486 | + config.__class__, |
| 1487 | + cls.__name__, |
| 1488 | + ", ".join(c.__name__ for c in MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING.keys()), |
| 1489 | + ) |
| 1490 | + ) |
| 1491 | + |
| 1492 | + |
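
Beyond the instantiation examples in the docstrings above, a minimal end-to-end inference sketch with the new auto class might look like this, assuming the ``google/tapas-base-finetuned-wtq`` checkpoint and a pandas DataFrame of strings for the table, which is what the TAPAS tokenizer expects:

    >>> import pandas as pd
    >>> from transformers import AutoTokenizer, AutoModelForTableQuestionAnswering

    >>> tokenizer = AutoTokenizer.from_pretrained('google/tapas-base-finetuned-wtq')
    >>> model = AutoModelForTableQuestionAnswering.from_pretrained('google/tapas-base-finetuned-wtq')

    >>> # Every table cell must be a string for TAPAS.
    >>> table = pd.DataFrame({'Actors': ['Brad Pitt', 'Leonardo Di Caprio'], 'Number of movies': ['87', '53']})
    >>> inputs = tokenizer(table=table, queries=['How many movies does Leonardo Di Caprio have?'], return_tensors='pt')
    >>> outputs = model(**inputs)  # cell-selection logits, plus aggregation logits for this checkpoint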
1387 | 1493 | class AutoModelForTokenClassification:
|
1388 | 1494 | r"""
|
1389 | 1495 | This is a generic model class that will be instantiated as one of the model classes of the library---with a token
|
|