{"id":154,"date":"2022-10-20T08:39:06","date_gmt":"2022-10-20T08:39:06","guid":{"rendered":"https:\/\/wp.lancs.ac.uk\/h-unique\/?page_id=154"},"modified":"2023-10-30T12:54:39","modified_gmt":"2023-10-30T12:54:39","slug":"research-publications","status":"publish","type":"page","link":"https:\/\/wp.lancs.ac.uk\/h-unique\/research-publications\/","title":{"rendered":"Research Publications"},"content":{"rendered":"\n<p>Here we present a summary of scientific papers and other research outputs from the H-Unique project. Full papers can be downloaded in pdf format.  Please not that these papers are technical papers intended for a specialist audience.<\/p>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity is-style-dots\" \/>\n\n\n\n<div class=\"wp-block-columns is-layout-flex wp-container-core-columns-is-layout-5c5cdacd wp-block-columns-is-layout-flex\" style=\"padding-top:var(--wp--preset--spacing--20);padding-right:var(--wp--preset--spacing--20);padding-bottom:var(--wp--preset--spacing--20);padding-left:var(--wp--preset--spacing--20)\">\n<div class=\"wp-block-column is-layout-flow wp-block-column-is-layout-flow\" style=\"border-width:1px\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"522\" src=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa21multi-1024x522.png\" alt=\"\" class=\"wp-image-355\" srcset=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa21multi-1024x522.png 1024w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa21multi-300x153.png 300w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa21multi-768x391.png 768w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa21multi-1536x782.png 1536w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa21multi.png 1826w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p><strong>Multi-Branch With Attention Network For Hand-Based Person Recognition 
<\/strong><\/p>\n\n\n\n<p><em>Nathanael L. Baisa, Bryan Williams, Hossein Rahmani, Plamen Angelov, Sue Black<\/em><\/p>\n\n\n\n<p><a href=\"#_1\" data-type=\"internal\" data-id=\"#_1\">[summary]<\/a><a rel=\"noreferrer noopener\" href=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Baisa21Multi.pdf\" data-type=\"URL\" data-id=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Baisa21Multi.pdf\" target=\"_blank\">[pdf]<\/a><\/p>\n<\/div>\n\n\n\n<div class=\"wp-block-column is-layout-flow wp-block-column-is-layout-flow\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"417\" src=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas21robust-1024x417.png\" alt=\"\" class=\"wp-image-356\" srcset=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas21robust-1024x417.png 1024w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas21robust-300x122.png 300w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas21robust-768x312.png 768w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas21robust.png 1062w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p><strong>Robust End-to-End Hand Identification via Holistic Multi-Unit Knuckle Recognition<\/strong><\/p>\n\n\n\n<p>Ritesh Vyas, Hossein Rahmani, Ricki Boswell-Challand, Plamen Angelov, Sue Black, Bryan M. 
Williams <\/p>\n\n\n\n<p><a href=\"#_2\">[summary]<\/a><a rel=\"noreferrer noopener\" href=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Vyas21Robust.pdf\" data-type=\"URL\" data-id=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Vyas21Robust.pdf\" target=\"_blank\">[pdf]<\/a><\/p>\n<\/div>\n\n\n\n<div class=\"wp-block-column is-layout-flow wp-block-column-is-layout-flow\" style=\"border-width:1px\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"470\" src=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa22hand-1024x470.png\" alt=\"\" class=\"wp-image-357\" srcset=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa22hand-1024x470.png 1024w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa22hand-300x138.png 300w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa22hand-768x352.png 768w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa22hand-1536x704.png 1536w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/baisa22hand.png 1910w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p><strong>Hand-Based Person Identification Using Global and Part-Aware Deep Feature Representation Learning<\/strong><\/p>\n\n\n\n<p><em>Nathanael L. 
Baisa, Bryan Williams, Hossein Rahmani, Plamen Angelov, Sue Black<\/em><\/p>\n\n\n\n<p><a href=\"#_3\">[summary]<\/a><a rel=\"noreferrer noopener\" href=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Baisa22Hand.pdf\" target=\"_blank\">[pdf]<\/a><\/p>\n<\/div>\n<\/div>\n\n\n\n<div class=\"wp-block-columns is-layout-flex wp-container-core-columns-is-layout-5c5cdacd wp-block-columns-is-layout-flex\" style=\"padding-top:var(--wp--preset--spacing--20);padding-right:var(--wp--preset--spacing--20);padding-bottom:var(--wp--preset--spacing--20);padding-left:var(--wp--preset--spacing--20)\">\n<div class=\"wp-block-column is-layout-flow wp-block-column-is-layout-flow\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"509\" src=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/jiang22graph-1024x509.png\" alt=\"\" class=\"wp-image-358\" srcset=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/jiang22graph-1024x509.png 1024w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/jiang22graph-300x149.png 300w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/jiang22graph-768x382.png 768w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/jiang22graph-1536x764.png 1536w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/jiang22graph.png 1616w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p><strong>Graph-Context Attention Networks for Size-Varied Deep Graph Matching<\/strong><\/p>\n\n\n\n<p>Zheheng Jiang, Hossein Rahmani, Plamen Angelov, Sue Black, Bryan M. 
Williams<\/p>\n\n\n\n<p><a href=\"#_4\">[summary]<\/a><a rel=\"noreferrer noopener\" href=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Jiang22Graph.pdf\" target=\"_blank\">[pdf]<\/a><\/p>\n<\/div>\n\n\n\n<div class=\"wp-block-column is-layout-flow wp-block-column-is-layout-flow\" style=\"border-width:1px\">\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" decoding=\"async\" width=\"1024\" height=\"318\" src=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas22ensemble-1024x318.png\" alt=\"\" class=\"wp-image-359\" srcset=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas22ensemble-1024x318.png 1024w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas22ensemble-300x93.png 300w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas22ensemble-768x238.png 768w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas22ensemble-1536x477.png 1536w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/vyas22ensemble.png 1746w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<p><strong>Ensemble-Based Bounding Box Regression for Enhanced Knuckle Localization<\/strong><\/p>\n\n\n\n<p>Ritesh Vyas, Bryan M. 
Williams, Hossein Rahmani, Ricki Boswell-Challand, Zheheng Jiang, Plamen Angelov, Sue Black<\/p>\n\n\n\n<p><a href=\"#_5\">[summary]<\/a><a rel=\"noreferrer noopener\" href=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2022\/11\/Vyas22Ensemble.pdf\" target=\"_blank\">[pdf]<\/a><\/p>\n<\/div>\n\n\n\n<div class=\"wp-block-column is-layout-flow wp-block-column-is-layout-flow\" style=\"border-style:none;border-width:0px\">\n<figure class=\"wp-block-image size-full\"><img loading=\"lazy\" decoding=\"async\" width=\"394\" height=\"105\" src=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2023\/10\/3D_hands.jpg\" alt=\"\" class=\"wp-image-498\" srcset=\"https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2023\/10\/3D_hands.jpg 394w, https:\/\/wp.lancs.ac.uk\/h-unique\/files\/2023\/10\/3D_hands-300x80.jpg 300w\" sizes=\"auto, (max-width: 394px) 100vw, 394px\" \/><\/figure>\n\n\n\n<p><strong>A Probabilistic Attention Model with Occlusion-aware Texture Regression for 3D Hand Reconstruction from a Single RGB Image<\/strong><\/p>\n\n\n\n<p>Jiang Z, Rahmani H, Black S, Williams BM<\/p>\n\n\n\n<p><a href=\"#_6\">[summary]<\/a><a href=\"http:\/\/wp.lancs.ac.uk\/h-unique\/files\/2023\/10\/Jiang23Probabilistic.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">[pdf]<\/a><\/p>\n<\/div>\n<\/div>\n\n\n\n<hr class=\"wp-block-separator has-alpha-channel-opacity is-style-dots\" \/>\n\n\n\n<h5 class=\"wp-block-heading\" id=\"_1\">1. Multi-Branch With Attention Network For Hand Based Person Recognition<\/h5>\n\n\n\n<p>This paper describes an approach to identifying matching hands in a hand image dataset using an existing convolutional neural network (CNN) with additional attention network (an) layer intended to improve the discrimination of relevant areas of detail in hand images results in improved accuracy.<\/p>\n\n\n\n<p>Citation: Baisa NL, Williams B, Rahmani H, Angelov P, Black S. Multi-Branch with Attention Network for Hand-Based Person Recognition. arXiv preprint arXiv:2108.02234. 
2021 Aug 4.<\/p>\n\n\n\n<h5 class=\"wp-block-heading\" id=\"_2\">2. Robust End-to-End Hand Identification via Holistic Multi-Unit Knuckle Recognition<\/h5>\n\n\n\n<p>This paper describes a novel approach to identification using knuckle crease patterns in hand images by identifying and comparing all visible knuckles (distal interphalangeal, proximal interphalangeal and metacarpopha-<br>langeal joints &#8211; referred to as major, minor and base knuckles) rather than just one knuckle.<\/p>\n\n\n\n<p>Knuckle regions are first identified in the image using a trained region based CNN. After checking for quality of the extraction results, discerning features of each region are extracted and evaluated before using these to attempt to identify matching hand images.  Evaluated on two large landmark datasets, the proposed framework achieves equal error rates (EER) of 1.0-1.9%, rank-1 accuracies of 99.3-100%<\/p>\n\n\n\n<p>Alongside the paper, an <a rel=\"noreferrer noopener\" href=\"https:\/\/h-unique.lancaster.ac.uk\/performance\/\" data-type=\"URL\" data-id=\"https:\/\/h-unique.lancaster.ac.uk\/performance\/\" target=\"_blank\">online tool<\/a> is also published, which allows the exploration of accuracy results obtained from various knuckle combinations.<\/p>\n\n\n\n<p>Citation: Vyas R, Rahmani H, Boswell-Challand R, Angelov P, Black S, Williams BM. Robust end-to-end hand identification via holistic multi-unit knuckle recognition. In 2021 IEEE International Joint Conference on Biometrics (IJCB) 2021 Aug 4 (pp. 1-8). IEEE.<\/p>\n\n\n\n<h5 class=\"wp-block-heading\" id=\"_3\">3. Hand-Based Person Identification Using Global and Part-Aware Deep Feature Representation Learning<\/h5>\n\n\n\n<p>This paper explores the use of a mixed global and part-aware CNN which works on both global and local image features for matching of hands in a dataset. <\/p>\n\n\n\n<p>Citation: Baisa NL, Williams B, Rahmani H, Angelov P, Black S. 
Hand-based person identification using global and part-aware deep feature representation learning. In 2022 Eleventh International Conference on Image Processing Theory, Tools and Applications (IPTA) 2022 Apr 19 (pp. 1-6). IEEE.<\/p>\n\n\n\n<h5 class=\"wp-block-heading\" id=\"_4\">4. Graph-Context Attention Networks for Size-Varied Deep Graph Matching<\/h5>\n\n\n\n<p>This paper describes a novel approach to the identification and extraction of linear features in images as a graph of connected nodes.  This has particular relevance for the mapping of hand veins, where the connections between vein segments are a key factor in identification.<\/p>\n\n\n\n<p>Citation: Jiang Z, Rahmani H, Angelov P, Black S, Williams BM. Graph-Context Attention Networks for Size-Varied Deep Graph Matching. In Proceedings of the IEEE\/CVF Conference on Computer Vision and Pattern Recognition 2022 (pp. 2343-2352).<\/p>\n\n\n\n<h5 class=\"wp-block-heading\" id=\"_5\">5. Ensemble-Based Bounding Box Regression for Enhanced Knuckle Localization<\/h5>\n\n\n\n<p>In this paper we present an enhanced method for the robust and accurate location of knuckle regions in hand images using multiple region based CNNs (R-CNN) to identify knuckle regions and corroborate each other&#8217;s results to provide superior localisation accuracy and classify the quality and usability of regions for biometric analysis.<\/p>\n\n\n\n<p>Citation: Vyas R, Williams BM, Rahmani H, Boswell-Challand R, Jiang Z, Angelov P, Black S. Ensemble-Based Bounding Box Regression for Enhanced Knuckle Localization. Sensors. 2022 Feb 17;22(4):1569.<\/p>\n\n\n\n<h5 class=\"wp-block-heading\" id=\"_6\">6. A Probabilistic Attention Model with Occlusion-aware Texture Regression for 3D Hand Reconstruction from a Single RGB Image<\/h5>\n\n\n\n<p>Citation: Jiang, Zheheng, Hossein Rahmani, Sue Black, and Bryan M Williams. 
&#8220;A Probabilistic Attention Model with Occlusion-aware Texture Regression for 3D Hand Reconstruction from a Single RGB Image.&#8221; 2023.<\/p>\n","protected":false},"excerpt":{"rendered":"<p>Here we present a summary of scientific papers and other research outputs from the H-Unique project. Full papers can be downloaded in pdf format. Please note that these papers are technical papers intended for a specialist audience. Multi-Branch With Attention Network For Hand-Based Person Recognition Nathanael L. Baisa, Bryan Williams, Hossein Rahmani, Plamen Angelov, Sue [&hellip;]<\/p>\n","protected":false},"author":1516,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"site-sidebar-layout":"default","site-content-layout":"","ast-site-content-layout":"default","site-content-style":"default","site-sidebar-style":"default","ast-global-header-display":"","ast-banner-title-visibility":"","ast-main-header-display":"","ast-hfb-above-header-display":"","ast-hfb-below-header-display":"","ast-hfb-mobile-header-display":"","site-post-title":"","ast-breadcrumbs-content":"","ast-featured-img":"","footer-sml-layout":"","ast-disable-related-posts":"","theme-transparent-header-meta":"","adv-header-id-meta":"","stick-header-meta":"","header-above-stick-meta":"","header-main-stick-meta":"","header-below-stick-meta":"","astra-migrate-meta-layouts":"set","ast-page-background-enabled":"default","ast-page-background-meta":{"desktop":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"ast-content-background-meta":{"desktop":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"footnotes":""},"class_list":["post-154","page","type-page","status-publish","hentry"],"jetpack_sharing_enabled":true,"_links":{"self":[{"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/pages\/154","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/users\/1516"}],"replies":[{"embeddable":true,"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/comments?post=154"}],"version-history":[{"count":27,"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/pages\/154\/revisions"}],"predecessor-version":[{"id":503,"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/pages\/154\/revisions\/503"}],"wp:attachment":[{"href":"https:\/\/wp.lancs.ac.uk\/h-unique\/wp-json\/wp\/v2\/media?parent=154"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}