{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,6]],"date-time":"2026-03-06T13:29:06Z","timestamp":1772803746064,"version":"3.50.1"},"reference-count":12,"publisher":"Institute of Electrical and Electronics Engineers (IEEE)","issue":"3","license":[{"start":{"date-parts":[[2019,9,1]],"date-time":"2019-09-01T00:00:00Z","timestamp":1567296000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/ieeexplore.ieee.org\/Xplorehelp\/downloads\/license-information\/IEEE.html"},{"start":{"date-parts":[[2019,9,1]],"date-time":"2019-09-01T00:00:00Z","timestamp":1567296000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-009"},{"start":{"date-parts":[[2019,9,1]],"date-time":"2019-09-01T00:00:00Z","timestamp":1567296000000},"content-version":"stm-asf","delay-in-days":0,"URL":"https:\/\/doi.org\/10.15223\/policy-001"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IEEE Robot. Automat. Mag."],"published-print":{"date-parts":[[2019,9]]},"DOI":"10.1109\/mra.2019.2926996","type":"journal-article","created":{"date-parts":[[2019,9,5]],"date-time":"2019-09-05T19:40:22Z","timestamp":1567712422000},"page":"11-13","source":"Crossref","is-referenced-by-count":23,"title":["Unintended Consequences of Biased Robotic and Artificial Intelligence Systems [Ethical, Legal, and Societal Issues]"],"prefix":"10.1109","volume":"26","author":[{"given":"Ludovic","family":"Righetti","sequence":"first","affiliation":[]},{"given":"Raj","family":"Madhavan","sequence":"additional","affiliation":[]},{"given":"Raja","family":"Chatila","sequence":"additional","affiliation":[]}],"member":"263","reference":[{"key":"ref8","article-title":"Inherent trade-offs in the fair determination of risk scores","author":"kleinberg","year":"2016"},{"key":"ref12","year":"0","journal-title":"Ethics in action"},{"key":"ref7","author":"han","year":"2014","journal-title":"Age gender and race estimation from unconstrained face images"},{"key":"ref9","author":"whittaker","year":"2018","journal-title":"AI now report"},{"key":"ref4","doi-asserted-by":"publisher","DOI":"10.1515\/popets-2015-0007"},{"key":"ref3","article-title":"Machine bias: There&#x2019;s software used across the country to predict future criminals. And it&#x2019;s biased against blacks","author":"angwin","year":"2016","journal-title":"ProPublica"},{"key":"ref6","doi-asserted-by":"publisher","DOI":"10.1007\/s11948-017-9975-2"},{"key":"ref11","article-title":"Predictive inequity in object detection","author":"wilson","year":"2019"},{"key":"ref5","author":"myers west","year":"2019","journal-title":"Discriminating systems Gender race and power in AI"},{"key":"ref10","author":"dastin","year":"2018","journal-title":"Amazon scraps secret AI recruiting tool that showed bias against women"},{"key":"ref2","doi-asserted-by":"publisher","DOI":"10.1145\/3306618.3314244"},{"key":"ref1","article-title":"Google &#x2018;fixed&#x2019; its racist algorithm by removing gorillas from its image-labeling tech","author":"vincent","year":"2018","journal-title":"The Verge"}],"container-title":["IEEE Robotics &amp; Automation Magazine"],"original-title":[],"link":[{"URL":"http:\/\/xplorestaging.ieee.org\/ielx7\/100\/8825926\/08825881.pdf?arnumber=8825881","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2023,4,27]],"date-time":"2023-04-27T22:24:25Z","timestamp":1682634265000},"score":1,"resource":{"primary":{"URL":"https:\/\/ieeexplore.ieee.org\/document\/8825881\/"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2019,9]]},"references-count":12,"journal-issue":{"issue":"3"},"URL":"https:\/\/doi.org\/10.1109\/mra.2019.2926996","relation":{},"ISSN":["1070-9932","1558-223X"],"issn-type":[{"value":"1070-9932","type":"print"},{"value":"1558-223X","type":"electronic"}],"subject":[],"published":{"date-parts":[[2019,9]]}}}