Array
(
    [0] => stdClass Object
        (
            [journal] => stdClass Object
                (
                    [id_jnl] => 16
                )

        )

    [1] => stdClass Object
        (
            [section] => stdClass Object
                (
                    [section] => 1627
                )

        )

    [2] => stdClass Object
        (
            [title] => Array
                (
                    [0] => Class, gender and race stereotypes reproduced by generative AI: recommendations for users@en
                    [1] => Los estereotipos de clase, género y raza reproducidos por la ia generativa: recomendaciones para los usuarios@es
                )

        )

    [3] => stdClass Object
        (
            [abstract] => Array
                (
                    [0] => Objective: To establish recommendations for prosumers to make ethical use of these tools, thus avoiding results that fall into class, race and/or gender stereotypes.
Method: Analysis of documentary material developed by different entities: international institutions (UNESCO and Council of Europe) and companies developing AI tools (OpenAI, Google, Microsoft and Anthropic).
Finding: AI can perpetuate stereotypes as it uses data that reproduces a context where minorities and women are underrepresented. Major corporations claim to have developed codes of conduct and self-regulation. This governance is in line with international recommendations and governs their behaviour, protects them from liability and underpins their CSR. They include recommendations for users in the processes of using the tools.
Conclusions: It is not yet possible to completely avoid biases but it is advisable for users to detect stereotypes and minimise them. Self-regulation contributes to less discriminatory content, as well as incorporating women and minorities as developers. They propose codes of conduct to detect inappropriate content. The findings are useful to encourage a critical perspective and focus on the role of users, to whom recommendations are offered, in a process in which the use of tools is fundamental.@en
                    [1] => Objetivo: Establecer recomendaciones para que los prosumidores efectúen un uso ético de estas herramientas evitando, de esta forma, resultados que caigan en estereotipos de clase, raza y/o género.
Método: Análisis del material documental desarrollado por distintas entidades: instituciones internacionales (UNESCO y Council of Europe) y empresas desarrolladoras de herramientas de IA (OpenAI, Google, Microsoft y Anthropic).
Resultado: La IA puede perpetuar estereotipos ya que emplea datos que reproducen un contexto donde las minorías y las mujeres están infrarrepresentadas. Las principales corporaciones afirman haber desarrollado códigos de conducta y autorregulación. Esta gobernanza concuerda con las recomendaciones internacionales y rige su comportamiento, los protege de responsabilidades y basa su RSC. Incluyen recomendaciones para los usuarios en los procesos de uso de las herramientas.
Conclusiones: Aún no es posible evitar totalmente los sesgos pero es recomendable que los usuarios detecten los estereotipos y los minimicen. La autorregulación contribuye a contenidos menos discriminatorios, además de incorporar a mujeres y minorías como desarrolladores. Plantean códigos de conducta para detectar contenidos inapropiados. Los hallazgos son útiles para fomentar la perspectiva crítica y poner el foco en el papel que los usuarios, a los que se ofrecen recomendaciones, tienen en un proceso en el que el uso que se haga de las herramientas resulta fundamental.@es
                )

        )

    [4] => stdClass Object
        (
            [author] => Array
                (
                    [0] => Andrea Castro-Martínez
                    [1] => José Luis Torres-Martín
                    [2] => Cristina Pérez-Ordóñez
                )

        )

    [5] => stdClass Object
        (
            [subject] => Array
                (
                    [0] => Inteligencia artificial@es
                    [1] => Género@es
                    [2] => Raza@es
                    [3] => Clase@es
                    [4] => Estereotipos@es
                    [5] => Inteligência artificial@pt
                    [6] => Gênero@pt
                    [7] => Raça@pt
                    [8] => Classe@pt
                    [9] => Estereótipos@pt
                    [10] => Artificial intelligence@en
                    [11] => Gender@en
                    [12] => Race@en
                    [13] => Class@en
                    [14] => Stereotypes@en
                )

        )

    [6] => stdClass Object
        (
            [source] => stdClass Object
                (
                    [vol] => 30
                    [nr] => 
                    [year] => 2025
                    [theme] => 
                )

        )

    [7] => stdClass Object
        (
            [datePub] => Array
                (
                    [0] => 2025-03-14
                )

        )

    [8] => stdClass Object
        (
            [DOI] => Array
                (
                    [0] => stdClass Object
                        (
                            [type] => DOI
                            [value] => Array
                                (
                                    [0] => 10.5007/1518-2924.2025.e103518
                                )

                        )

                )

        )

    [9] => stdClass Object
        (
            [http] => Array
                (
                    [0] => stdClass Object
                        (
                            [type] => HTTP
                            [value] => Array
                                (
                                    [0] => https://periodicos.ufsc.br/index.php/eb/article/view/103518
                                )

                        )

                    [1] => stdClass Object
                        (
                            [type] => HTTP
                            [value] => Array
                                (
                                    [0] => https://periodicos.ufsc.br/index.php/eb/article/view/103518/59163
                                )

                        )

                    [2] => stdClass Object
                        (
                            [type] => HTTP
                            [value] => Array
                                (
                                    [0] => https://periodicos.ufsc.br/index.php/eb/article/view/103518/59044
                                )

                        )

                    [3] => stdClass Object
                        (
                            [type] => HTTP
                            [value] => Array
                                (
                                    [0] => https://periodicos.ufsc.br/index.php/eb/article/view/103518/59045
                                )

                        )

                )

        )

    [10] => stdClass Object
        (
            [language] => Array
                (
                    [0] => es
                    [1] => pt
                )

        )

    [11] => stdClass Object
        (
            [license] => Array
                (
                    [0] => Copr
                    [1] => CCBY4.0
                )

        )

)